Related
I'm trying to write my first game engine in C++ (I've already done it in Java, though).
I created a basic mesh class, which holds an integer of GLuint for the vao/Vertex Arrays, and an array (for the moment of just a size of 2) for the Buffers/vbos,
When i try to call my constructor in the mesh class, and i call the function
glGenVertexArrays(1, &vaoId); The program crashes, on visual studio a box shows up saying
access violated during the path execution on 0x00000000
Mesh.cpp:
#include "Mesh.h"
Mesh::Mesh(GLuint vaoid, int verticeslength) : vaoId(vaoid),
verticesLength(verticeslength) {}
// Builds a VAO/VBO pair from a raw vertex array and records the vertex count.
// BUG: `vertices` decays to a float*, so sizeof(vertices) is sizeof(float*)
// (4 or 8), NOT the array's size — both verticesLength and the buffer size
// below are wrong. Pass the element count explicitly (or use std::vector).
// NOTE(review): a crash at glGenVertexArrays with address 0x00000000 means the
// GL function pointers were never loaded — glewInit() must run after a context
// is made current (see Main.cpp).
Mesh::Mesh(float vertices[]) {
this->verticesLength = sizeof(vertices) / sizeof(float); // BUG: pointer size, not array length
glGenVertexArrays(1, &vaoId); // create VAO
glBindVertexArray(vaoId); // bind VAO
glGenBuffers(1, &vboIds[0]); // allocate memory to VBO
glBindBuffer(GL_ARRAY_BUFFER, vboIds[0]); // bind vbo
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices) / sizeof(float), // BUG: size must be in BYTES: count * sizeof(float)
vertices, GL_STATIC_DRAW); // store data in vbo
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0); // attribute 0 = 3 floats per vertex, tightly packed
}
// Releases the GL objects owned by this mesh.
Mesh::~Mesh() {
glDisableVertexAttribArray(0); // disable the position vbo
glDeleteBuffers(2, vboIds); // delete the vbos
glDeleteVertexArrays(1, &vaoId); // delete the vao
delete &vaoId; // BUG: undefined behavior — vaoId is a member, never allocated with new; remove this line
delete &vboIds; // BUG: same — members are destroyed with the object, never via delete
}
GLuint Mesh::getVaoId() { return vaoId; }
int Mesh::getVerticesLength() { return verticesLength; }
// Draws the mesh: binds its VAO, enables attribute 0 and issues the draw call.
// NOTE(review): glDrawArrays expects the number of VERTICES; verticesLength is
// computed as a float count in the constructor — confirm units match.
void Mesh::render() {
glBindVertexArray(vaoId);
glEnableVertexAttribArray(0);
glDrawArrays(GL_TRIANGLES, 0, verticesLength);
glDisableVertexAttribArray(0);
glBindVertexArray(0);
}
Mesh.h:
#ifndef Mesh_H
#define Mesh_H
#include <GL/glew.h>
// Owns one VAO and up to two VBOs (position + texture coords) for a mesh.
class Mesh {
private:
int verticesLength; // count passed to glDrawArrays (set by the constructors)
GLuint vboIds[2]; // 0 = position, 1 = textureCoords
GLuint vaoId; // vertex array object handle
public:
Mesh(GLuint vaoId, int verticesLength); // wrap an already-created VAO
Mesh(float vertices[]); // build VAO/VBO from raw vertex data
~Mesh();
int getVerticesLength();
GLuint getVaoId();
void render(); // bind the VAO and draw
};
#endif Mesh
Main.cpp:
#include <iostream>
#include "Mesh.h"
#include <GLFW/glfw3.h>
#include "GlfwUtils.h"
#include "InputManager.h"
#define WIDTH 800
#define HEIGHT 600
bool initializeGLFW();
// Entry point: creates the GLFW window, builds a quad mesh and runs the loop.
int main() {
if (!initializeGLFW()) return EXIT_FAILURE;
GLFWwindow *window = glfwCreateWindow(WIDTH, HEIGHT, "Scope Engine",
NULL, NULL);
glfwMakeContextCurrent(window); // BUG: window is used here BEFORE the NULL check below
if (!window) {
std::cout << "Window creation failed" << std::endl;
return EXIT_FAILURE;
}
glfwSetKeyCallback(window, InputManager::key_callback);
// Two triangles forming a quad; 3 floats (x, y, z) per vertex.
float vertices[] = {
-0.5f, 0.5f, 0,
-0.5f, -0.5f, 0,
0.5f, -0.5f, 0,
0.5f, -0.5f, 0,
0.5f, 0.5f, 0,
-0.5f, 0.5f, 0
};
// NOTE(review): glewInit() ran inside initializeGLFW() before this context
// existed, so the GL entry points are still NULL — the Mesh constructor's
// glGenVertexArrays call crashes on 0x00000000. GLEW must be initialized
// here, after glfwMakeContextCurrent.
Mesh* mesh = new Mesh(vertices); // gotta initialize the mesh!
while (!glfwWindowShouldClose(window)) {
mesh->render();
std::cout << "Game Loop!" << std::endl;
GlfwUtils::UpdateDisplay(window);
}
delete mesh;
glfwDestroyWindow(window);
return EXIT_SUCCESS;
}
// Initializes GLEW and GLFW.
// BUG: the order is wrong twice over — glewInit() requires a current OpenGL
// context (none exists yet at this point), and glfwInit() should run before
// anything else GLFW-related.
bool initializeGLFW() {
glewExperimental = GL_TRUE;
if (!glewInit()) { // BUG: glewInit() returns GLEW_OK (0) on SUCCESS, so this check is also inverted
std::cout << "Couldn't initalize OpenGL" << std::endl;
return false;
}
GLenum error = glGetError(); // meaningless before a context is current
if (error != GL_NO_ERROR) { std::cout << "OpenGL error: " << error << std::endl; }
if (!glfwInit()) {
std::cout << "Couldn't initalize GLFW" << std::endl;
return false;
}
glfwSetErrorCallback(GlfwUtils::error_callBack);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3); // NOTE(review): only the MINOR hint is set; MAJOR is missing — confirm intended
return true;
}
Is it something that has to do with the driver or the linker, or did I make an error in my code?
The GLEW library has to be initialized, by glewInit, after the OpenGL context has become current by glfwMakeContextCurrent.
See Initializing GLEW.
First make the OpenGL context current and then init GLEW:
glfwMakeContextCurrent(window);
if (!window) {
std::cout << "Window creation failed" << std::endl;
return EXIT_FAILURE;
}
glewExperimental = GL_TRUE;
if (glewInit() != GLEW_OK) {
std::cout << "Couldn't initalize OpenGL" << std::endl;
return false;
}
It doesn't make sense to call any OpenGL instruction before the OpenGL context is made current.
Remove GLenum error = glGetError(); from initializeGLFW.
In the constructor Mesh::Mesh(float vertices[]), sizeof(vertices) is not the size of the array (this is not Java). It is the size of a pointer, which is 8 on a 64-bit system.
Use std::vector:
#include <vector>
std::vector<float> vertices{
-0.5f, 0.5f, 0,
-0.5f, -0.5f, 0,
0.5f, -0.5f, 0,
0.5f, -0.5f, 0,
0.5f, 0.5f, 0,
-0.5f, 0.5f, 0
};
Mesh *mesh = new Mesh(vertices);
class Mesh {
private:
int noOfVertices;
// [...]
public:
Mesh::Mesh(const std::vector<float> &vertices);
// [...]
};
Mesh::Mesh(const std::vector<float> &vertices) {
// [...]
noOfVertices = (int)vertices.size() / 3;
glBufferData(GL_ARRAY_BUFFER,
vertices.size()*sizeof(float), vertices.data(), GL_STATIC_DRAW);
}
The number of elements in a std::vector can be obtained with std::vector::size, and a pointer to its contents with std::vector::data.
In your case, each vertex coordinate consists of 3 components (x, y and z), so the number of coordinates is vertices.size() / 3.
The 2nd parameter to glBufferData has to be the size of the buffer in bytes, which is vertices.size() * sizeof(float).
I'm trying to apply textures to opengl program, but the program just crashes for some reason, can someone tell me what I did wrong? Trying to fix this problem for two days now :/ works fine when color in fragment shader is set to some random color, but when color is:
vec4 textColor = texture(u_Texture, v_TextCoord);
color = textColor;
Edited:
#define GLEW_STATIC
#define alloca __builtin_alloca
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <fstream>
#include <sstream>
#include <stb_image.h>
using namespace std;
static string ParseShader(const string& filepath);
static unsigned int CompileShader(unsigned int type, const string& source);
static unsigned int CreateShader(const string& vertexShader, const string& fragmentShader);
void glCheckError();
float vertex[] =
{
0.5f, 0.5f, 0.0f,
1.0f, 1.0f, // 0
0.5f, -0.5f, 0.0f,
1.0f, 0.0f, // 1
-0.5f, -0.5f, 0.0f,
0.0f, 0.0f, // 2
-0.5f, 0.5f, 0.0f,
0.0f, 1.0f // 3
};
float vertexBack[] =
{
-0.6, -0.6f, -0.5f, // 0
0.6f, -0.6f, -0.5f, // 1
0.6f, 0.6f, -0.5f, // 2
-0.6f, 0.6f, -0.5f // 3
};
unsigned int indexes[] =
{
0, 1, 3, // first triangle
1, 2, 3 // second triangle
};
// Entry point for the textured-quad demo: sets up GLFW/GLEW, compiles the
// shaders, uploads vertex + index data, loads the texture and runs the loop.
int main(void)
{
GLFWwindow* window;
/* Initialize the library */
if (!glfwInit())
{
cout << "GLFW failed to load!" << endl;
return -1;
}
else
cout << "GLFW loaded" << endl << endl;
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow(800, 600, "Window", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
if(glewInit() != GLEW_OK)
cout << "Error loading GLEW" << endl;
else
cout << "GLEW loaded - version: " << glGetString(GL_VERSION) << endl;
unsigned int shader = CreateShader(ParseShader("shaders/vertex.shader"), ParseShader("shaders/fragment.shader"));
glUseProgram(shader);
// Make vertex position array
unsigned int buffers;
glGenBuffers(1, &buffers);
glBindBuffer(GL_ARRAY_BUFFER, buffers);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertex), vertex, GL_STATIC_DRAW);
// Make vertex position indexes out of arrays
unsigned int ib;
glGenBuffers(1, &ib);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ib);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indexes), indexes, GL_STATIC_DRAW);
// Position layout
unsigned int posAttrib = glGetAttribLocation(shader, "position");
glEnableVertexAttribArray(posAttrib);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 5, 0); // BUG: hard-coded index 0 instead of posAttrib
// Texture layout
unsigned int texAttrib = glGetAttribLocation(shader, "texCoord");
glEnableVertexAttribArray(texAttrib);
glCheckError();
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 5, (void*)(3 * sizeof(float))); // BUG: index 0 again — overwrites the position pointer and leaves texAttrib's enabled array with an undefined pointer (crash)
// Load image
int h, w, v_bpp;
unsigned char *image = stbi_load("texture.png", &w, &h, &v_bpp, 4);
if(image == nullptr)
cout << "failed to load image!" << endl;
unsigned int texture_id;
glGenTextures(1, &texture_id);
glBindTexture(GL_TEXTURE_2D, texture_id);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, w, h, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
glGenerateMipmap(GL_TEXTURE_2D);
stbi_image_free(image);
// Set slot
glActiveTexture(GL_TEXTURE0);
int location = glGetUniformLocation(shader, "u_Texture");
glUniform1i(location, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glBindTexture(GL_TEXTURE_2D, 0); // NOTE(review): texture is unbound here and never rebound before drawing — confirm intended
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window) && glfwGetKey(window, GLFW_KEY_ESCAPE ) != GLFW_PRESS)
{
/* Render here */
glClear(GL_COLOR_BUFFER_BIT);
//glDrawArrays(GL_TRIANGLES, 0, 6);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr);
glCheckError();
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glfwTerminate();
return 0;
}
// Reads the whole text file at `filepath`, normalizing every line ending to
// '\n'. Returns an empty string when the file cannot be opened (or is empty).
static std::string ParseShader(const std::string& filepath) {
    std::ifstream input(filepath);
    std::string source;
    for (std::string currentLine; std::getline(input, currentLine); ) {
        source += currentLine;
        source += '\n';
    }
    return source;
}
// Compiles one shader of the given type (GL_VERTEX_SHADER or
// GL_FRAGMENT_SHADER) from `source`. Returns the shader id, or 0 on compile
// failure after logging the info log and deleting the shader object.
static unsigned int CompileShader(unsigned int type, const string& source) {
unsigned int id = glCreateShader(type);
const char* src = source.c_str();
glShaderSource(id, 1, &src, NULL);
glCompileShader(id);
int result;
glGetShaderiv(id, GL_COMPILE_STATUS, &result);
if(result == GL_FALSE)
{
int length;
glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
char* message = (char*)alloca(length * sizeof(char)); // stack-allocated scratch for the log; freed automatically on return
glGetShaderInfoLog(id, length, &length, message);
cout << "Failed to compile " << (type == GL_VERTEX_SHADER ? "Vertex" : "Fragment") << " shader!" << endl;
cout << message << endl;
glDeleteShader(id);
return 0;
}
return id;
}
// Links a program from the given vertex + fragment shader sources and returns
// its id. The intermediate shader objects are deleted after linking.
// NOTE(review): CompileShader returns 0 on failure, which is attached here
// without any check — consider validating vs/fs before glAttachShader.
static unsigned int CreateShader(const string& vertexShader, const string& fragmentShader) {
unsigned int program = glCreateProgram();
unsigned int vs = CompileShader(GL_VERTEX_SHADER, vertexShader);
unsigned int fs = CompileShader(GL_FRAGMENT_SHADER, fragmentShader);
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glValidateProgram(program);
glDeleteShader(vs);
glDeleteShader(fs);
return program;
}
void glCheckError() {
GLenum err = glGetError();
while(err!=GL_NO_ERROR) {
string error;
switch(err) {
case GL_INVALID_OPERATION: error="INVALID_OPERATION"; break;
case GL_INVALID_ENUM: error="INVALID_ENUM"; break;
case GL_INVALID_VALUE: error="INVALID_VALUE"; break;
case GL_OUT_OF_MEMORY: error="OUT_OF_MEMORY"; break;
case GL_INVALID_FRAMEBUFFER_OPERATION: error="INVALID_FRAMEBUFFER_OPERATION"; break;
}
cerr << err << " (" << error.c_str() << ")" <<endl;
err=glGetError();
}
}
You totally screwed up your vertex attribute pointer setup:
// Position layout
unsigned int posAttrib = glGetAttribLocation(shader, "position");
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 5, 0);
First, you query for an attribute named position, but you never use that index, you instead use 0.
// Texture layout
unsigned int texAttrib = glGetAttribLocation(shader, "texCoord");
glEnableVertexAttribArray(1);
glCheckError();
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 5, (void*)(3 * sizeof(float)));
Then, you query for texCoord, ignore it again, enable attribute array 1, and overwrite the attribute pointer for attribute 0.
This means that attribute 1 has some undefined pointer value, and you have undefined behavior.
What most likely happens is that you are lucky when you do not use the texture, because you only have one active attribute, which happens to get location 0. And your GL implementation seems to ignore attribute arrays for non-active attributes, so it does not crash. When you have both attributes enabled, chances are that they will be 0 and 1, and there is no way not to dereference that invalid pointer.
So first of all, I am a total beginner in OpenGL, and I am learning OpenGL from learnopengl.com. I understood what's going on in the code in the main method, and the code works in the main method. But when I tried to create a class for it, it doesn't render anything!! I have absolutely no idea what's going wrong!
Sprite.h
#pragma once
#include"Shader.h"
#include<GL\glew.h>
// Wraps one VAO/VBO pair plus a shader for drawing a single triangle sprite.
class Sprite {
GLuint vao;
GLuint vbo;
int vertexCount; // number of vertices passed to glDrawArrays
Shader shader;
public:
// BUG: `vertices` decays to a GLfloat*, so sizeof(vertices) below is the size
// of a pointer (4/8 bytes), not the 9-float array — the buffer is uploaded
// nearly empty. Use sizeof(GLfloat) * 9 (or pass the byte size in).
Sprite(const GLfloat vertices[9], int noOfVertices, Shader s = Shader()) : vertexCount(noOfVertices) , shader(s)
{
glGenVertexArrays(1, &vao);
glGenBuffers(1, &vbo);
glBindVertexArray(vao);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW); // BUG: sizeof(pointer) — see above
glVertexAttribPointer(0, this->vertexCount, GL_FLOAT, GL_FALSE, 0, nullptr); // NOTE(review): 2nd arg is components-per-vertex (3), not the vertex count; here both happen to be 3
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
}
// Binds the VAO and draws `vertexCount` vertices as triangles.
void draw() {
glBindVertexArray(this->vao);
glDrawArrays(GL_TRIANGLES, 0, this->vertexCount);
glBindVertexArray(0);
}
};
Shader.h
#pragma once
#include<iostream>
#include<fstream>
#include<sstream>
#include<string>
#include<vector>
#include<GL\glew.h>
class Shader {
GLuint programID;
GLuint vertID;
GLuint fragID;
public:
Shader(std::string vertex = "Shaders/Dvert.vert", std::string fragment = "Shaders/Dfrag.frag")
{
this->programID = glCreateProgram();
this->vertID = glCreateShader(GL_VERTEX_SHADER);
this->fragID = glCreateShader(GL_FRAGMENT_SHADER);
this->CompileShaders(vertex, fragment);
this->LinkProgram();
}
GLuint getProgramId() {
return this->programID;
}
private:
void CompileShaders(std::string vertexShaderPath, std::string FragShaderPath) {
std::string vCode = GetContentsFrom(vertexShaderPath).c_str();
const char * vertexCode = vCode.c_str();
glShaderSource(this->vertID, 1, &vertexCode, NULL);
glCompileShader(this->vertID);
this->HandleShaderErrors(this->vertID);
std::string fCode = GetContentsFrom(FragShaderPath).c_str();
const char * fragCode = fCode.c_str();
glShaderSource(this->fragID, 1, &fragCode, NULL);
glCompileShader(this->fragID);
std::cout << fragCode << std::endl;
this->HandleShaderErrors(this->fragID);
}
void LinkProgram() {
glAttachShader(this->programID, this->vertID);
glAttachShader(this->programID, this->fragID);
glLinkProgram(this->programID);
this->HandleLinkErrors();
}
std::string GetContentsFrom(std::string path) {
std::ifstream file(path);
std::ostringstream stream;
if (!file) {
std::cout << "Specified file path is invalid" << std::endl;
return "";
}
stream << file.rdbuf();
std::cout << stream.str() << std::endl << std::endl;
return stream.str();
}
void HandleLinkErrors() {
int result;
glGetProgramiv(this->programID, GL_LINK_STATUS, &result);
if (result == GL_FALSE) {
int length;
glGetProgramiv(this->programID, GL_INFO_LOG_LENGTH, &length);
std::vector<char> error(length);
glGetProgramInfoLog(this->programID, length , &length, &error[0]);
std::cout << &error[0] << std::endl;
glDetachShader(this->programID, this->vertID);
glDetachShader(this->programID, this->fragID);
}
}
void HandleShaderErrors(GLuint shader) {
int status;
glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
if (status == GL_FALSE) {
int length;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &length);
std::vector<char> error(length);
glGetShaderInfoLog(shader, length, &length, &error[0]);
std::cout << &error[0] << std::endl;
glDeleteShader(shader);
}
}
};
Main.cpp
#include<GL\glew.h>
#include<GLFW\glfw3.h>
#include"Console.h"
#include"Headers\Shader.h"
#include"Headers\Sprite.h"
#include<iostream>
static const GLfloat vertices[] = {
0.5f, -0.5f ,0.0f ,
-0.5f, -0.5f, 0.0f,
0.0f, 0.5f, 0.0f
};
// Creates the window/context, builds the triangle sprite and runs the loop.
int main() {
if (!glfwInit()) {
Console.Log("GLFW could not be initialised!!");
Console.Wait();
}
GLFWwindow *window = glfwCreateWindow(600, 400, "Open GL", nullptr, nullptr);
glfwMakeContextCurrent(window);
if (glewInit() != GLEW_OK) {
Console.Log("GLEW could not be initialised!!");
Console.Wait();
}
Sprite s(vertices, 3);
glClearColor(0, 0, 0, 1);
// BUG: hints have no effect here — the window and context already exist.
// Window hints must be set BEFORE glfwCreateWindow.
glfwWindowHint(GLFW_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_VERSION_MINOR, 3);
while (!glfwWindowShouldClose(window)) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glfwPollEvents();
s.draw();
glfwSwapBuffers(window);
}
}
Please help me out!!
Note:- The code in the main method renders the white triangle without the
shaders,so my graphics card works without shaders.
Also, the Console class I have created is just to print to the console and has nothing to do with the OpenGL stuff. Console.Log is std::cout << and Console.Wait is std::cin.get();
There are at least two problems. The first is that your sizeof(vertices) in Sprite produces wrong result, because it gives the size of a pointer, not the size of the array. To fix it, replace it with sizeof(GLfloat) * 9.
The second problem is that you are specifying the OpenGL context version too late, after creating the window and context. To fix it, call these methods before glfwCreateWindow, not after:
glfwWindowHint(GLFW_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_VERSION_MINOR, 3);
I'm using OpenGL 4.4 with SDL2. I am trying to render a simple triangle with the vertices (-1, -1, 0), (1, -1, 0), (0, 1, 0). However, when I think I'm doing everything correctly, nothing is drawn.
I extracted and reorganized the relevant code from my project:
#include <cerrno>
#include <cstring>
#include <exception>
#include <fstream>
#include <iostream>
#include <string>
#include <GL/glew.h>
#include <GL/glu.h>
#include <SDL2/SDL.h>
#include <SDL2/SDL_opengl.h>
void init();
void cleanUp();
std::string loadShader(std::string filepath);
void checkShaderSuccess(GLuint shader);
SDL_Window* win;
SDL_GLContext glContext;
GLuint program, vertShader, fragShader, vao, vbo;
// Simple exception type carrying an arbitrary error message.
class GenError: public std::exception {
private:
    std::string message_; // text returned by what()

public:
    GenError(): exception(), message_("") {}
    GenError(const std::string& m): exception(), message_(m) {}
    virtual ~GenError() throw() {}
    // Returns the stored message (empty for a default-constructed error).
    virtual const char* what() const throw() {
        return message_.c_str();
    }
};
int main() {
init();
program = glCreateProgram();
if (program == 0) {
throw GenError("Shader creation failed: "
"Could not find valid memory location in "
"constructor");
}
vertShader = glCreateShader(GL_VERTEX_SHADER);
fragShader = glCreateShader(GL_FRAGMENT_SHADER);
if (vertShader == 0 || fragShader == 0) {
std::string m;
m += "Shader creation failed: "
"Could not find valid memory location when "
"adding shader: ";
m += (char *)gluErrorString(glGetError());
throw GenError(m);
}
std::cout << "Creating vertex shader..." << std::endl;
std::string data = loadShader("./shaders/basicVertex.vs");
const GLchar* data_c = data.c_str();
glShaderSource(vertShader, 1, &data_c, NULL);
glCompileShader(vertShader);
checkShaderSuccess(vertShader);
glAttachShader(program, vertShader);
std::cout << "Vertex shader created" << std::endl;
std::cout << "Creating fragment shader..." << std::endl;
data = loadShader("./shaders/basicFragment.fs");
data_c = data.c_str();
glShaderSource(fragShader, 1, &data_c, NULL);
glCompileShader(fragShader);
checkShaderSuccess(fragShader);
glAttachShader(program, fragShader);
std::cout << "Fragment shader created" << std::endl;
glLinkProgram(program);
GLint success;
glGetProgramiv(program, GL_LINK_STATUS, &success);
if (success == GL_FALSE) {
GLint logLen = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLen);
GLchar programLog[logLen];
glGetProgramInfoLog(program, logLen, &logLen, programLog);
std::string m;
m += "Failed to link program: ";
m += (char *)gluErrorString(glGetError());
m += ": ";
m += (char *)programLog;
throw GenError(m);
}
glValidateProgram(program);
glGetProgramiv(program, GL_VALIDATE_STATUS, &success);
if (success == GL_FALSE) {
GLint logLen = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLen);
GLchar programLog[logLen];
glGetProgramInfoLog(program, logLen, &logLen, programLog);
std::string m;
m += "Failed to validate program: ";
m += (char *)gluErrorString(glGetError());
m += ": ";
m += (char *)programLog;
throw GenError(m);
}
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
const GLfloat verts[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f
};
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER,
sizeof(verts),
verts,
GL_STATIC_DRAW );
SDL_Event ev;
bool running = true;
while (true) {
while (SDL_PollEvent(&ev)) {
if (ev.type == SDL_WINDOWEVENT &&
ev.window.event == SDL_WINDOWEVENT_CLOSE) {
std::cout << "Closing window..." << std::endl;
running = false;
break;
}
}
if (!running) break;
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(program);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(0,
3,
GL_FLOAT,
GL_FALSE,
3*sizeof(GLfloat),
(GLvoid*)0 );
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
SDL_GL_SwapWindow(win);
}
std::cout << "Window closed" << std::endl;
glDeleteBuffers(1, &vbo);
glDeleteVertexArrays(1, &vao);
glDeleteProgram(program);
glDeleteShader(vertShader);
glDeleteShader(fragShader);
cleanUp();
return 0;
}
// Initializes SDL, the window, the GL 4.4 context and GLEW, then sets the
// initial GL state. Throws GenError on any failure.
void init() {
std::cout << "Initializing..." << std::endl;
if (SDL_Init(SDL_INIT_VIDEO) != 0) {
std::string m;
m.append("Error initializing SDL2: ");
m.append(SDL_GetError());
throw GenError(m);
}
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 4);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 4);
win = SDL_CreateWindow("Triangle Test",
SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED,
800, 600,
SDL_WINDOW_OPENGL );
if (win == NULL) {
throw GenError(SDL_GetError());
}
glContext = SDL_GL_CreateContext(win);
if (glContext == NULL) {
std::string m;
m.append("Error associating window with OpenGL: SDL Error: ");
m.append(SDL_GetError());
throw GenError(m);
}
glewExperimental = GL_TRUE;
GLenum glewErr = glewInit();
if (glewErr != GLEW_OK) {
std::string m;
m.append("Error initializing OpenGL GLEW extension: ");
m.append((const char*)glewGetErrorString(glewErr));
throw GenError(m);
} else {
/* GLEW does not play nice with OpenGL 4.4.
* GLEW thinks OpenGL 4.4 is "pretentious" and
* "entitled". GLEW likes to throw an invalid
* enumerant error the next time glGetError is
* called after GLEW's initialization.
* glGetError must be envoked to discard this
* faulty error. GLEW makes my code look sloppy.
* We do not like GLEW. We tolerate GLEW.
*/
GLenum junk = glGetError();
}
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glFrontFace(GL_CW); // BUG: the triangle in main() winds counter-clockwise; with CW fronts + back-face culling below, it is culled entirely. Remove this line (GL_CCW is the default).
glCullFace(GL_BACK);
glEnable(GL_CULL_FACE);
glEnable(GL_DEPTH_TEST);
glEnable(GL_FRAMEBUFFER_SRGB);
if(SDL_GL_SetSwapInterval(1) < 0) {
std::cerr << "Warning: Unable to set VSync! "
<< "SDL Error: "
<< SDL_GetError() << std::endl;
}
GLenum error = glGetError();
if (error != GL_NO_ERROR) {
std::string m;
m.append("Error initializing OpenGL: OpenGL Error: ");
m.append(reinterpret_cast<const char*>(gluErrorString(error)));
throw GenError(m);
}
std::cout << "Initialized" << std::endl;
}
void cleanUp() {
std::cout << "Cleaning up..." << std::endl;
SDL_GL_DeleteContext(glContext);
SDL_DestroyWindow(win);
SDL_Quit();
std::cout << "Cleaned" << std::endl;
}
// Loads the text of the shader file at `filepath`, line by line, normalizing
// every line ending to '\n'. On open failure, logs the reason to stderr and
// returns the empty string.
std::string loadShader(std::string filepath) {
    std::ifstream source(filepath.c_str());
    if (!source.is_open()) {
        std::cerr << "Could not load shader: "
                  << "Error opening "
                  << filepath
                  << ": " << std::strerror(errno)
                  << std::endl;
        return std::string("");
    }
    std::string text;
    for (std::string row; std::getline(source, row); ) {
        text += row;
        text += '\n';
    }
    source.close();
    return text;
}
// Throws GenError — carrying the GL error string plus the shader info log —
// if the given shader failed to compile; the failed shader is deleted first.
void checkShaderSuccess(GLuint shader) {
GLint success;
glGetShaderiv(shader, GL_COMPILE_STATUS, &success);
if (success == GL_FALSE) {
GLint logLen = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLen);
GLchar shaderLog[logLen]; // NOTE(review): variable-length array — a compiler extension, not standard C++
glGetShaderInfoLog(shader, logLen, &logLen, shaderLog);
std::string m;
m += "Shader compilation failed: ";
m += (char *)gluErrorString(glGetError());
m += ": ";
m += (char *)shaderLog;
glDeleteShader(shader);
throw GenError(m);
}
}
...without error catching (for faster skimming):
#include <cerrno>
#include <cstring>
#include <exception>
#include <fstream>
#include <iostream>
#include <string>
#include <GL/glew.h>
#include <GL/glu.h>
#include <SDL2/SDL.h>
#include <SDL2/SDL_opengl.h>
void init();
void cleanUp();
std::string loadShader(std::string filepath);
SDL_Window* win;
SDL_GLContext glContext;
GLuint program, vertShader, fragShader, vao, vbo;
int main() {
init();
program = glCreateProgram();
vertShader = glCreateShader(GL_VERTEX_SHADER);
fragShader = glCreateShader(GL_FRAGMENT_SHADER);
std::cout << "Creating vertex shader..." << std::endl;
std::string data = loadShader("./shaders/basicVertex.vs");
const GLchar* data_c = data.c_str();
glShaderSource(vertShader, 1, &data_c, NULL);
glCompileShader(vertShader);
glAttachShader(program, vertShader);
std::cout << "Vertex shader created" << std::endl;
std::cout << "Creating fragment shader..." << std::endl;
data = loadShader("./shaders/basicFragment.fs");
data_c = data.c_str();
glShaderSource(fragShader, 1, &data_c, NULL);
glCompileShader(fragShader);
glAttachShader(program, fragShader);
std::cout << "Fragment shader created" << std::endl;
glLinkProgram(program);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
const GLfloat verts[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f
};
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER,
sizeof(verts),
verts,
GL_STATIC_DRAW );
SDL_Event ev;
bool running = true;
while (true) {
while (SDL_PollEvent(&ev)) {
if (ev.type == SDL_WINDOWEVENT &&
ev.window.event == SDL_WINDOWEVENT_CLOSE) {
std::cout << "Closing window..." << std::endl;
running = false;
break;
}
}
if (!running) break;
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(program);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(0,
3,
GL_FLOAT,
GL_FALSE,
3*sizeof(GLfloat),
(GLvoid*)0 );
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
SDL_GL_SwapWindow(win);
}
std::cout << "Window closed" << std::endl;
glDeleteBuffers(1, &vbo);
glDeleteVertexArrays(1, &vao);
glDeleteProgram(program);
glDeleteShader(vertShader);
glDeleteShader(fragShader);
cleanUp();
return 0;
}
void init() {
std::cout << "Initializing..." << std::endl;
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 4);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 4);
win = SDL_CreateWindow("Triangle Test",
SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED,
800, 600,
SDL_WINDOW_OPENGL );
glContext = SDL_GL_CreateContext(win);
glewExperimental = GL_TRUE;
GLenum glewErr = glewInit();
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glFrontFace(GL_CW);
glCullFace(GL_BACK);
glEnable(GL_CULL_FACE);
glEnable(GL_DEPTH_TEST);
glEnable(GL_FRAMEBUFFER_SRGB);
std::cout << "Initialized" << std::endl;
}
void cleanUp() {
std::cout << "Cleaning up..." << std::endl;
SDL_GL_DeleteContext(glContext);
SDL_DestroyWindow(win);
SDL_Quit();
std::cout << "Cleaned" << std::endl;
}
std::string loadShader(std::string filepath) {
std::ifstream shaderFile(filepath.c_str());
std::string content, line;
while (std::getline(shaderFile, line)) {
content += line + '\n';
}
shaderFile.close();
return content;
}
...my vertex shader (GLSL):
#version 440
layout (location = 0) in vec3 position;
void main() {
gl_Position = vec4(0.5 * position, 1.0);
}
...and my fragment shader:
#version 440
out vec4 fragColor;
void main() {
fragColor = vec4(0.0, 1.0, 1.0, 1.0);
}
Now oddly enough, when I change line 148 in my C++ code (with error catching) from this...
3*sizeof(GLfloat),
...to this (in other words, changing the stride)...
3*sizeof(GLdouble),
...compiling and running produces a triangle with the vertices (-1, -1, 0), (0, 0, 0), (0, 1, 0). The second vertex is apparently getting obscured. Instead of an isosceles triangle, I get a scalene triangle.
I would like to 1) figure out how to fix my program so that it displays a triangle with the specified vertices, and 2) understand what I did wrong initially that caused my such an odd result when modifying the aforementioned line of code.
I have been tinkering with this for almost a week. Any insight is appreciated. Thanks!
Your code has a problem with the winding order of the polygons. You specify clockwise winding for the front faces, and enable culling of the back faces:
glFrontFace(GL_CW);
glCullFace(GL_BACK);
But the triangle has counter-clockwise winding order:
const GLfloat verts[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f
};
This means that the triangle will be eliminated by culling.
Using counter-clockwise winding is mostly standard in OpenGL, and is also the default. So the best option is that you simply remove this line of code:
glFrontFace(GL_CW);
This will leave the value at GL_CCW, which matches your geometry.
Disabling backface culling is always one of the first things you should do when polygons don't show up. Having the wrong winding order is one of the most common causes of things not rendering, and it's very easy to triage by simply disabling culling, and checking if that makes the geometry show up.
I am trying to render a simple Triangle in OpenGL ES 2.x using c++ and SDL 2.
But glCreateShader and glCreateProgram are returning zero.
Below is the code I am using:
#include "SDL.h"
#include <OpenGLES/ES2/gl.h>
#include <OpenGLES/ES2/glext.h>
#include <iostream>
#include <string>
#include <memory>
using namespace std;
GLuint programObject;
// Owns a reference to the SDL window and redraws the triangle once per frame.
class Graphics
{
private:
SDL_Window* _window; // not owned; lifetime is managed by main()
public:
Graphics(SDL_Window* window)
{
_window = window;
}
// Clears the screen, draws the triangle with the global program and swaps.
void update()
{
glClearColor(255.0f, 0.0f, 255.0f, 1); // NOTE(review): components are clamped to [0,1], so 255.0f acts as 1.0f
// glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
GLfloat vVertices[] = {
0.0f, 0.5f, 0.0f,
-0.5f, -0.5f, 0.0f,
0.5f, -0.5f, 0.0f
};
glViewport (0, 0, 320, 480);
glColorMask (GL_TRUE, GL_TRUE, GL_TRUE, GL_FALSE);
glClear (GL_COLOR_BUFFER_BIT);
glUseProgram (programObject);
glVertexAttribPointer (0, 3, GL_FLOAT, GL_FALSE, 0, vVertices); // client-side array; attribute 0 is bound to "vPosition" in Init()
glEnableVertexAttribArray (0);
glDrawArrays (GL_TRIANGLES, 0, 3);
SDL_GL_SwapWindow(_window);
}
};
void UpdateFrame(void* param)
{
Graphics* graphics = (Graphics*)param;
graphics->update();
}
///
// Create a shader object, load the shader source, and
// compile the shader.
//
GLuint LoadShader(GLenum type, const GLchar *shaderSrc)
{
GLuint shader;
GLint compiled;
// Create the shader object
shader = glCreateShader(type);
if(shader == 0)
{
cerr << "Could not create OpenGL shader " << endl;
return 0;
}
// Load the shader source
glShaderSource(shader, 1, &shaderSrc, NULL);
// Compile the shader
glCompileShader(shader);
// Check the compile status
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
if(!compiled)
{
GLint infoLen = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
if(infoLen > 1)
{
char* infoLog = (char*)malloc(sizeof(char) * infoLen);
glGetShaderInfoLog(shader, infoLen, NULL, infoLog);
SDL_Log("Error compiling shader:\n%s\n", infoLog);
free(infoLog);
}
glDeleteShader(shader);
return 0;
}
return shader;
}
///
// Initialize the shader and program object
//
int Init()
{
const char vertexShaderString[] =
"attribute vec4 vPosition; \n"
"void main() \n"
"{ \n"
" gl_Position = vPosition; \n"
"} \n";
const char fragmentShaderString[] =
"precision mediump float;\n"\
"void main() \n"
"{ \n"
" gl_FragColor = vec4 ( 1.0, 0.0, 0.0, 1.0 );\n"
"} \n";
GLuint vertexShader;
GLuint fragmentShader;
GLint linked;
vertexShader = LoadShader (GL_VERTEX_SHADER, vertexShaderString);
fragmentShader = LoadShader (GL_FRAGMENT_SHADER, fragmentShaderString);
programObject = glCreateProgram ();
if (programObject == 0) {
cerr << "Could not create OpenGL program" << endl;
return 0;
}
glAttachShader (programObject, vertexShader);
glAttachShader (programObject, fragmentShader);
glBindAttribLocation (programObject, 0, "vPosition");
glLinkProgram (programObject);
glGetProgramiv (programObject, GL_LINK_STATUS, &linked);
if (!linked) {
GLint infoLen = 0;
glGetProgramiv (programObject, GL_INFO_LOG_LENGTH, &infoLen);
if (infoLen > 1) {
char* infoLog = (char*) malloc (sizeof (char) * infoLen);
glGetProgramInfoLog (programObject, infoLen, NULL, infoLog);
cerr << "Error linking program: " << infoLog << endl;
free (infoLog);
}
glDeleteProgram (programObject);
return 0;
}
glClearColor (0.0f, 0.0f, 0.0f, 1.0f);
return true;
}
// SDL event watcher: logs touch events and filters them out (returns 0);
// every other event is kept (returns 1).
int EventFilter(void* userdata, SDL_Event* event)
{
    const auto kind = event->type;
    if (kind == SDL_FINGERMOTION) {
        SDL_Log("Finger Motion");
        return 0;
    }
    if (kind == SDL_FINGERDOWN) {
        SDL_Log("Finger Down");
        return 0;
    }
    if (kind == SDL_FINGERUP) {
        SDL_Log("Finger Up");
        return 0;
    }
    return 1;
}
int main(int argc, char** argv)
{
/* initialize SDL */
if (SDL_Init(SDL_INIT_VIDEO) < 0)
{
printf("Could not initialize SDL\n");
return 1;
}
SDL_DisplayMode displayMode;
SDL_GetDesktopDisplayMode(0, &displayMode);
/* create window and renderer */
SDL_Window* window = SDL_CreateWindow(NULL, 0, 0, displayMode.w, displayMode.h, SDL_WINDOW_OPENGL | SDL_WINDOW_FULLSCREEN | SDL_WINDOW_RESIZABLE);
if (!window) {
printf("Could not initialize Window\n");
return 1;
}
auto gl = SDL_GL_CreateContext(window);
if (!Init()) {
cerr << "Error initializing OpenGL" << endl;
return 1;
}
unique_ptr<Graphics> graphics = unique_ptr<Graphics>(new Graphics(window));
SDL_iPhoneSetAnimationCallback(window, 1, UpdateFrame, graphics.get());
SDL_AddEventWatch(EventFilter, NULL);
//Game Loop
SDL_Event event;
auto done = false;
while (!done)
{
SDL_PumpEvents();
while (SDL_PollEvent(&event))
{
switch (event.type)
{
case SDL_QUIT:
done = true;
break;
case SDL_APP_DIDENTERFOREGROUND:
SDL_Log("SDL_APP_DIDENTERFOREGROUND");
break;
case SDL_APP_DIDENTERBACKGROUND:
SDL_Log("SDL_APP_DIDENTERBACKGROUND");
break;
case SDL_APP_LOWMEMORY:
SDL_Log("SDL_APP_LOWMEMORY");
break;
case SDL_APP_TERMINATING:
SDL_Log("SDL_APP_TERMINATING");
break;
case SDL_APP_WILLENTERBACKGROUND:
SDL_Log("SDL_APP_WILLENTERBACKGROUND");
break;
case SDL_APP_WILLENTERFOREGROUND:
SDL_Log("SDL_APP_WILLENTERFOREGROUND");
break;
case SDL_WINDOWEVENT:
{
switch (event.window.event)
{
case SDL_WINDOWEVENT_RESIZED:
{
SDL_Log("Window %d resized to %dx%d", event.window.windowID, event.window.data1, event.window.data2);
break;
}
}
}
}
}
}
SDL_GL_DeleteContext(gl);
// Done! Close the window, clean-up and exit the program.
SDL_DestroyWindow(window);
SDL_Quit();
return 0;
}
Maybe I am missing something here, any help would be appreciated .
Thanks,
Khaled
This is most likely caused by a problem with the context created by SDL, specifically there were no attributes for the MAJOR/MINOR and MASK attributes which tend to be more important for iOS, OS X etc
What needs to be done is as follows. Before the creation of the SDL_Window do the following:
/* Set the correct attributes for MASK and MAJOR version */
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_ES);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 2);
/* create window and renderer */
SDL_Window* window = SDL_CreateWindow(NULL, 0, 0, displayMode.w, displayMode.h, SDL_WINDOW_OPENGL | SDL_WINDOW_FULLSCREEN | SDL_WINDOW_RESIZABLE);
Another situation which can cause the same error output is calling glCreateShader or glCreateProgram inside of a glBegin / glEnd block.