glGenBuffers access violation exception - c++

I'm using GLFW and GLEW, but during the initialization of my object glGenBuffers throws an exception:
void Character::init2D(glm::vec3 top, glm::vec3 bottom_left, glm::vec3 bottom_right)
{
glm::vec3 Vertices[3];
Vertices[0] = bottom_left;
Vertices[1] = top;
Vertices[2] = bottom_right;
this->top = top;
this->left_front = bottom_left;
this->right_front = bottom_right;
glGenBuffers(1, &VBO); //throws an exception 0xC0000005: Access violation
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(Vertices), Vertices, GL_STATIC_DRAW);
CompileShaders(shaderProgram, "vertex.shader", "fragment.shader");
}
I declare my class Character like this
#include <GL\glew.h>
#include <GLFW\glfw3.h>
#include <glm\glm.hpp>
#include <glm\gtc\type_ptr.hpp>
#pragma comment(lib, "glfw3.lib")
#pragma comment(lib, "glew32.lib")
class Character
{
private:
glm::vec3 top,
left_front,
right_front,
left_back,
right_back;
GLuint VBO;
GLuint shaderProgram;
public:
Character();
void init2D(glm::vec3 top,
glm::vec3 bottom_left,
glm::vec3 bottom_right);
void draw();
void move();
void action();
~Character() {};
};
And my main.cpp looks like this
#include <iostream>
#include "character.h"
#define WIDTH 600
#define HEIGHT 600
using namespace std;
Character simple;
void render()
{
simple.draw();
}
int main(int argc, char** argv)
{
GLFWwindow *window;
if (!glewInit())
exit(EXIT_FAILURE);
if (!glfwInit())
exit(EXIT_FAILURE);
window = glfwCreateWindow(WIDTH, HEIGHT, "Imensia", NULL, NULL);
if (!window) { glfwTerminate(); exit(EXIT_FAILURE); }
glm::vec3 left(-0.5, 0, 0);
glm::vec3 top(0, 0.5, 0);
glm::vec3 right(0.5, 0, 0);
simple.init2D(top, left, right);
glfwMakeContextCurrent(window);
while(!glfwWindowShouldClose(window))
{
glViewport(0, 0, WIDTH, HEIGHT);
glClear(GL_COLOR_BUFFER_BIT);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(-1, 1, -1, 1, 1, -1);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
render();
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwDestroyWindow(window);
glfwTerminate();
return 0;
};
Is it a problem with the initialization, or something else?
In the project properties I have set the include and library directories...

GLEW initialization should be done after the window/OpenGL context has been created and made current.
For the given code, glfwMakeContextCurrent and glewInit can be moved right below glfwCreateWindow (note that simple.init2D, which calls glGenBuffers, must also come after this point):
int main(int argc, char** argv)
{
GLFWwindow *window;
if (!glfwInit())
exit(EXIT_FAILURE);
window = glfwCreateWindow(WIDTH, HEIGHT, "Imensia", NULL, NULL);
if (!window) { glfwTerminate(); exit(EXIT_FAILURE); }
glfwMakeContextCurrent(window);
if (glewInit() != GLEW_OK)
exit(EXIT_FAILURE);
:
:
}

Keugyeol's approach helped me fix the problem. If you follow the tutorial mentioned in GLFW's docs, this may help you.
int main(int argc, char** argv){
GLFWwindow* window;
if (!glfwInit()) {
return -1;
}
window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
if (!window) {
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
if (glewInit() != GLEW_OK) {
return -1;
}
/*your code
*
*/
}

Related

OpenGL: Segmentation Fault (Core Dumped) when running

I am writing a game engine in C++ and OpenGL, and when I open my first test window I receive this issue:
Segmentation fault (core dumped)
There is no error when compiling; the problem occurs when I go to open the window.
HOW TO REPRODUCE:
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <iostream>
void framebuffer_size_callback(GLFWwindow* window, int width, int height);
void framebuffer_size_callback(GLFWwindow* window, int width, int height)
{
glViewport(0, 0, width, height);
}
int main()
{
typedef void (*GL_GENBUFFERS) (GLsizei, GLuint*);
GL_GENBUFFERS glGenBuffers = (GL_GENBUFFERS) glfwGetProcAddress("glGenBuffers");
unsigned int buffer;
glGenBuffers(1, &buffer);
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow* window = glfwCreateWindow(800, 600, "BloodBunny", NULL, NULL);
if (window == NULL)
{
std::cout << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))
{
std::cout << "Failed to initialize GLAD" << std::endl;
return -1;
}
while(!glfwWindowShouldClose(window))
{
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);
glViewport(0,0,800,600);
glfwTerminate();
return 0;
}
glfwGetProcAddress() will only (potentially, depending on the name requested and the GL version of the current context) return usable, non-NULL function-pointers if a GL context is current.
With GLFW you make a window's context current via glfwMakeContextCurrent(), so if you truly want to grab your own pointer to glGenBuffers(), you need to move the glfwGetProcAddress() call to after a successful glfwMakeContextCurrent():
...
glfwMakeContextCurrent(window);
...
typedef void (*GL_GENBUFFERS) (GLsizei, GLuint*);
GL_GENBUFFERS glGenBuffers = (GL_GENBUFFERS) glfwGetProcAddress("glGenBuffers");
unsigned int buffer;
glGenBuffers(1, &buffer);
...
...though honestly there's little point, since you're calling gladLoadGLLoader() right after glfwMakeContextCurrent(), which will populate a perfectly usable glGenBuffers() pointer at global scope all on its own.
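To make that ordering concrete, here is a minimal sketch of the reproduction code with everything moved after context creation (error handling trimmed; it relies on GLAD's own glGenBuffers rather than a hand-loaded pointer):
#include <glad/glad.h>
#include <GLFW/glfw3.h>
int main()
{
// Create the window and make its context current before loading any GL functions
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow* window = glfwCreateWindow(800, 600, "BloodBunny", NULL, NULL);
if (window == NULL) { glfwTerminate(); return -1; }
glfwMakeContextCurrent(window);
// GLAD resolves glGenBuffers (and every other entry point) against the current context
if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress)) { return -1; }
unsigned int buffer;
glGenBuffers(1, &buffer); // safe now: a context is current and the pointer is loaded
while (!glfwWindowShouldClose(window))
{
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
return 0;
}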

Access violation when creating a mesh and launching the window SDL/GLEW C++

I am currently following along with an OpenGL course, and when organising my code into classes I get an access violation error, along with this in the console terminal:
SDL window creation failed! (I programmed it to output that when the window cannot be created.)
Here's my code:
main.cpp
#include <stdio.h>
#include <string.h>
#include <cmath>
#include <vector>
#include <GL/glew.h>
#define SDL_MAIN_HANDLED
#include <SDL/SDL.h>
#include <SDL/SDL_opengl.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
#include "Window.h"
#include "Mesh.h"
#include "Shader.h"
//Window dimensions
const GLint width = 800, height = 600;
const float toRadians = 3.14159265f / 180.0f;
Window mainWindow;
std::vector<Mesh*> meshList;
std::vector<Shader*> shaderList;
bool direction = true;
float triOffSet = 0.0f;
float triMaxOffSet = 0.7f;
float triIncrement = 0.010f;
float curAngle = 0.0f;
bool sizeDirection = true;
float curSize = 0.4f;
float maxSize = 0.8f;
float minSize = 0.1f;
//Vertex shader
static const char* vShader = "Shaders/shader.vert";
//Fragment shader
static const char* fShader = "Shaders/shader.frag";
void CreateObjects() {
unsigned int indices[] = {
0, 3, 1,
1, 3, 2,
2, 3, 0,
0, 1, 2
};
GLfloat vertices[] = {
-1.0f, -1.0f, 0.0f,
0.0f, -1.0f, 1.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f
};
Mesh* obj1 = new Mesh();
obj1->CreateMesh(vertices, indices, 12, 12);
meshList.push_back(obj1);
Mesh* obj2 = new Mesh();
obj2->CreateMesh(vertices, indices, 12, 12);
meshList.push_back(obj2);
Mesh* obj3 = new Mesh();
obj3->CreateMesh(vertices, indices, 12, 12);
meshList.push_back(obj3);
}
void CreateShaders() {
Shader *shader1 = new Shader();
shader1->CreateFromFiles(vShader, fShader);
shaderList.push_back(shader1);
}
int main() {
mainWindow = Window(800, 600);
mainWindow.Initialise();
CreateObjects();
CreateShaders();
GLuint uniformProjection = 0, uniformModel = 0;
glm::mat4 projection = glm::perspective(45.0f, (GLfloat)mainWindow.getBufferWidth() / mainWindow.getBufferHeight(), 0.1f, 100.0f);
SDL_Event windowEvent;
while (true) {
if (SDL_PollEvent(&windowEvent)) {
if (windowEvent.type == SDL_QUIT) {
break;
}
}
if (direction) {
triOffSet += triIncrement;
}
else {
triOffSet -= triIncrement;
}
if (abs(triOffSet) >= triMaxOffSet) {
direction = !direction;
}
curAngle += 1.0f;
if (curAngle >= 360) {
curAngle -= 360;
}
if (direction) {
curSize += 0.001f;
}
else {
curSize -= 0.001f;
}
if (curSize >= maxSize || curSize <= minSize) {
sizeDirection = !sizeDirection;
}
//Clear window
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
shaderList[0]->UseShader();
uniformModel = shaderList[0]->GetModelLocation();
uniformProjection = shaderList[0]->GetProjectionLocation();
glm::mat4 model(1.0f);
model = glm::translate(model, glm::vec3(0.0f, 0.0f, -2.5f));
model = glm::rotate(model, curAngle * toRadians, glm::vec3(0.0f, 1.0f, 0.0f));
model = glm::scale(model, glm::vec3(0.4f, 0.4f, 1.0f));
glUniformMatrix4fv(uniformModel, 1, GL_FALSE, glm::value_ptr(model));
glUniformMatrix4fv(uniformProjection, 1, GL_FALSE, glm::value_ptr(projection));
meshList[0]->RenderMesh();
model = glm::mat4(1.0f);
model = glm::translate(model, glm::vec3(-triOffSet, 1.0f, -2.5f));
model = glm::scale(model, glm::vec3(0.4f, 0.4f, 1.0f));
glUniformMatrix4fv(uniformModel, 1, GL_FALSE, glm::value_ptr(model));
meshList[1]->RenderMesh();
model = glm::mat4(1.0f);
model = glm::translate(model, glm::vec3(triOffSet, -1.0f, -2.5f));
model = glm::scale(model, glm::vec3(0.4f, 0.4f, 1.0f));
glUniformMatrix4fv(uniformModel, 1, GL_FALSE, glm::value_ptr(model));
meshList[2]->RenderMesh();
glUseProgram(0);
mainWindow.swapWindows();
}
return 0;
}
Mesh.h
#pragma once
#include <GL/glew.h>
class Mesh
{
private:
GLuint VAO, VBO, IBO;
GLsizei indexCount;
public:
Mesh();
void CreateMesh(GLfloat *vertices, unsigned int *indices, unsigned int numOfVertices, unsigned int numOfIndices);
void RenderMesh();
void ClearMesh();
~Mesh();
};
Mesh.cpp
#include "Mesh.h"
#include <GL/glew.h>
Mesh::Mesh() {
VAO = 0;
VBO = 0;
IBO = 0;
indexCount = 0;
}
void Mesh::CreateMesh(GLfloat* vertices, unsigned int* indices, unsigned int numOfVertices, unsigned int numOfIndices) {
indexCount = numOfIndices;
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
glGenBuffers(1, &IBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, IBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices[0]) * numOfIndices, indices, GL_STATIC_DRAW);
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices[0]) * numOfVertices, vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}
void Mesh::RenderMesh() {
glBindVertexArray(VAO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, IBO);
glDrawElements(GL_TRIANGLES, indexCount, GL_UNSIGNED_INT, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glBindVertexArray(0);
}
void Mesh::ClearMesh() {
if (IBO != 0) {
glDeleteBuffers(1, &IBO);
IBO = 0;
}
if (VBO != 0) {
glDeleteBuffers(1, &VBO);
VBO = 0;
}
if (VAO != 0) {
glDeleteBuffers(1, &VAO);
VAO = 0;
}
indexCount = 0;
}
Mesh::~Mesh() {
ClearMesh();
}
Shader.h
#pragma once
#include <stdio.h>
#include <string>
#include <iostream>
#include <fstream>
#include <GL/glew.h>
class Shader
{
private:
GLuint shaderID, uniformProjection, uniformModel;
void CompileShader(const char* vertexCode, const char* fragmentCode);
void AddShader(GLuint theProgram, const char* shaderCode, GLenum shaderType);
public:
Shader();
void CreateFromString(const char* vertexCode, const char* fragmentCode);
void CreateFromFiles(const char* vertexLocation, const char* fragmentLocation);
std::string ReadFile(const char* fileLocation);
GLuint GetProjectionLocation();
GLuint GetModelLocation();
void UseShader();
void ClearShader();
~Shader();
};
Shader.cpp
#include "Shader.h"
Shader::Shader()
{
shaderID = 0;
uniformModel = 0;
uniformProjection = 0;
}
void Shader::CreateFromString(const char* vertexCode, const char* fragmentCode)
{
CompileShader(vertexCode, fragmentCode);
}
void Shader::CreateFromFiles(const char* vertexLocation, const char* fragmentLocation)
{
std::string vertexString = ReadFile(vertexLocation);
std::string fragmentString = ReadFile(fragmentLocation);
const char* vertexCode = vertexString.c_str();
const char* fragmentCode = fragmentString.c_str();
CompileShader(vertexCode, fragmentCode);
}
std::string Shader::ReadFile(const char* fileLocation)
{
std::string content;
std::ifstream fileStream(fileLocation, std::ios::in);
if (!fileStream.is_open()) {
printf("Failed to read %s! File doesn't exist.", fileLocation);
return "";
}
std::string line = "";
while (!fileStream.eof())
{
std::getline(fileStream, line);
content.append(line + "\n");
}
fileStream.close();
return content;
}
void Shader::CompileShader(const char* vertexCode, const char* fragmentCode)
{
shaderID = glCreateProgram();
if (!shaderID)
{
printf("Error creating shader program!\n");
return;
}
AddShader(shaderID, vertexCode, GL_VERTEX_SHADER);
AddShader(shaderID, fragmentCode, GL_FRAGMENT_SHADER);
GLint result = 0;
GLchar eLog[1024] = { 0 };
glLinkProgram(shaderID);
glGetProgramiv(shaderID, GL_LINK_STATUS, &result);
if (!result)
{
glGetProgramInfoLog(shaderID, sizeof(eLog), NULL, eLog);
printf("Error linking program: '%s'\n", eLog);
return;
}
glValidateProgram(shaderID);
glGetProgramiv(shaderID, GL_VALIDATE_STATUS, &result);
if (!result)
{
glGetProgramInfoLog(shaderID, sizeof(eLog), NULL, eLog);
printf("Error validating program: '%s'\n", eLog);
return;
}
uniformProjection = glGetUniformLocation(shaderID, "projection");
uniformModel = glGetUniformLocation(shaderID, "model");
}
GLuint Shader::GetProjectionLocation()
{
return uniformProjection;
}
GLuint Shader::GetModelLocation()
{
return uniformModel;
}
void Shader::UseShader()
{
glUseProgram(shaderID);
}
void Shader::ClearShader()
{
if (shaderID != 0)
{
glDeleteProgram(shaderID);
shaderID = 0;
}
uniformModel = 0;
uniformProjection = 0;
}
void Shader::AddShader(GLuint theProgram, const char* shaderCode, GLenum shaderType)
{
GLuint theShader = glCreateShader(shaderType);
const GLchar* theCode[1];
theCode[0] = shaderCode;
GLint codeLength[1];
codeLength[0] = strlen(shaderCode);
glShaderSource(theShader, 1, theCode, codeLength);
glCompileShader(theShader);
GLint result = 0;
GLchar eLog[1024] = { 0 };
glGetShaderiv(theShader, GL_COMPILE_STATUS, &result);
if (!result)
{
glGetShaderInfoLog(theShader, sizeof(eLog), NULL, eLog);
printf("Error compiling the %d shader: '%s'\n", shaderType, eLog);
return;
}
glAttachShader(theProgram, theShader);
}
Shader::~Shader()
{
ClearShader();
}
Window.h
#pragma once
#include <stdio.h>
#include <GL/glew.h>
#include <SDL/SDL.h>
#include <SDL/SDL_opengl.h>
class Window
{
private:
SDL_Window* mainWindow;
SDL_Event windowEvent;
GLint width, height;
public:
Window();
Window(GLint windowWidth, GLint windowHeight);
int Initialise();
GLint getBufferWidth() { return width; }
GLint getBufferHeight() { return height; }
void swapWindows() { SDL_GL_SwapWindow(mainWindow); }
~Window();
};
Window.cpp
#include "Window.h"
Window::Window() {
width = 800;
height = 600;
}
Window::Window(GLint width, GLint height) {
width = width;
height = height;
}
int Window::Initialise() {
if (SDL_Init(SDL_INIT_VIDEO) < 0) {
printf("SDL initialisation failed\n");
SDL_Quit();
return 1;
}
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 3);
mainWindow = SDL_CreateWindow("Test game", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, width, height, SDL_WINDOW_OPENGL);
if (!mainWindow) {
printf("SDL window creation failed!\n");
SDL_Quit();
return 1;
}
//Set context for GLEW to use
SDL_GLContext context = SDL_GL_CreateContext(mainWindow);
//Allow modern extension features
glewExperimental = GL_TRUE;
if (glewInit() != GLEW_OK) {
printf("GLEW initialization failed!\n");
SDL_DestroyWindow(mainWindow);
SDL_Quit();
return 1;
}
glEnable(GL_DEPTH_TEST);
//Setup viewport size
glViewport(0, 0, 800, 600);
}
Window::~Window() {
SDL_DestroyWindow(mainWindow);
SDL_Quit();
}
I've heard that a solution to this could be to put glewExperimental = GL_TRUE before glewInit(), but that hasn't worked; those two lines are in Window.cpp and I don't know how that could affect Mesh.cpp.
The debugger shows this at the bottom:
Exception thrown at 0x00000000 in OpenGLCourseApp.exe: 0xC0000005: Access violation executing location 0x00000000.
Your Window constructor does not actually initialize the fields of the Window object, which means you call SDL_CreateWindow with random width and height.
Use an initializer list instead, which does not suffer this problem:
Window::Window(int width, int height)
: width(width), height(height) {
}
Once you get past that, keep in mind that the statement
mainWindow = Window(800, 600);
will create a temporary Window object, assign it to mainWindow, and then immediately destroy it!
I suggest you change the type of mainWindow to Window* (or better yet a std::unique_ptr<Window>),
and change that line to
mainWindow = new Window(800, 600);
or
mainWindow = std::make_unique<Window>(800, 600);
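For example, a minimal sketch of that change in main.cpp, assuming mainWindow becomes a std::unique_ptr<Window> and <memory> is included:
#include <memory>
std::unique_ptr<Window> mainWindow; // instead of: Window mainWindow;
int main() {
mainWindow = std::make_unique<Window>(800, 600); // lives until main() ends, no temporary is destroyed
mainWindow->Initialise();
CreateObjects();
CreateShaders();
// ...the rest of main() is unchanged, with mainWindow. replaced by mainWindow->
}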

glew Missing GL version c++ using SDL2 2.0.5 and glew 2.0.0

I am following the tutorial "Making Games With Ben" (https://www.youtube.com/user/makinggameswithben) on OpenGL and I get this
missing gl version
every time I try to initialize GLEW.
I have reinstalled GLEW and SDL2 many times.
I don't understand what I'm doing wrong.
I am using GLEW 2.0.0 and SDL2 2.0.5.
Header file:
#pragma once
#include <string>
#include <vector>
//Graphics
#include <SDL2\SDL.h>
#include <GL\glew.h>
class Display
{
public://functions:
Display(int m_width, int m_heigth, const char title []);
void update();
~Display();
private:
public://variables:
std::vector<char> error;
int width;
int heigth;
private:
//Window pointer
SDL_Window * m_window;
};
.cpp file:
#include "Display.h"
Display::Display(int m_width, int m_heigth, const char title[])
{
m_window = nullptr;
width = m_width;
heigth = m_heigth;
//Initialize SDL
SDL_Init(SDL_INIT_EVERYTHING);
m_window = SDL_CreateWindow(title, SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, m_width, m_heigth, SDL_WINDOW_OPENGL);
//error 1:
if (m_window = nullptr) {
error.push_back(255);
}
//error 2:
SDL_GLContext glContext = SDL_GL_CreateContext(m_window);
if (glContext = nullptr) {
error.push_back(254);
}
const GLenum GLerror = glewInit();
//error 3:
if (GLerror != GLEW_OK) {
error.push_back(GLerror);
}
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
}
void Display::update()
{
glClearDepth(1.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
SDL_GL_SwapWindow(m_window);
}
Display::~Display()
{
delete m_window;
SDL_Quit();
}
You may have to do:
glewExperimental = GL_TRUE;
right before
glewInit();
This will allow GLEW to use OpenGL features above OpenGL version 2.0.
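In the Display constructor above that would look roughly like this (a sketch of the placement only; the existing error checks stay as they are):
//Set context for GLEW to use
SDL_GLContext glContext = SDL_GL_CreateContext(m_window);
//Allow GLEW to load entry points beyond OpenGL 2.0
glewExperimental = GL_TRUE;
const GLenum GLerror = glewInit(); // must run while this context is current
if (GLerror != GLEW_OK) {
error.push_back(GLerror);
}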

SDL window not responding

My SDL window stops responding when I click it or minimize it; the background turns white and OpenGL no longer updates the screen. Here is the code:
#include <stdio.h>
#include <SDL.h>
#include <SDL_opengl.h>
const int SCREEN_WIDTH = 640;
const int SCREEN_HEIGHT = 480;
const char * SCREEN_NAME = "Tetris SDL";
SDL_Window *window;
SDL_GLContext glcontext;
int init();
void main_loop();
void quit();
void events();
void update();
void draw();
int main(int argc, char* args[])
{
if(!init())
quit();
main_loop();
quit();
}
int init()
{
if (SDL_Init(SDL_INIT_VIDEO) < 0)
return 0;
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_ACCUM_RED_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ACCUM_GREEN_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ACCUM_BLUE_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ACCUM_ALPHA_SIZE, 0);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);
window = SDL_CreateWindow(SCREEN_NAME, SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED,
SCREEN_WIDTH, SCREEN_HEIGHT, SDL_WINDOW_OPENGL);
if(!window)
return 0;
glcontext = SDL_GL_CreateContext(window);
glViewport(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT);
return 1;
}
void main_loop()
{
while(true)
{
events();
update();
draw();
//only for test
SDL_Delay(16);
}
}
void events(){}
void update(){}
void draw()
{
glClear(GL_COLOR_BUFFER_BIT);
glClearColor(0.0, 0.0, 0.0, 0.0);
glBegin(GL_TRIANGLES);
glColor3f(1,0,0); glVertex2f(0.0f,0.5f);
glColor3f(0,1,0); glVertex2f(-0.5f, -0.5f);
glColor3f(0,0,1); glVertex2f(0.5f, -0.5f);
glEnd();
SDL_GL_SwapWindow(window);
}
void quit()
{
SDL_GL_DeleteContext(glcontext);
SDL_DestroyWindow(window);
SDL_Quit();
}
I'm using SDL 2.0 with the GNU GCC compiler on Windows XP. Does anyone know what the problem could be?
You don't handle any events while in the loop.
If you don't do that, the minimize event will never be handled (which usually means the window buffer is freed to be used by someone else).
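A minimal sketch of what events() could do, assuming a global flag (here called running, not in the original code) replaces the while(true) condition in main_loop():
bool running = true; // assumed flag; main_loop() would loop on while(running)
void events()
{
SDL_Event e;
while (SDL_PollEvent(&e)) // drain the event queue every frame
{
if (e.type == SDL_QUIT)
running = false;
}
}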

OpenGL - Triangle with a shader isn't showing

This program builds with no problem, and the executable starts up, but no triangle shows up. I am following a GLSL tutorial where a Shader class is made to handle GLSL files.
Shader.h
#ifndef SHADER_H_
#define SHADER_H_
#include <GL/glew.h>
#include <GL/glfw.h>
#include <string>
class Shader {
public:
Shader();
Shader(const char *vsFile, const char *fsFile);
~Shader();
void init(const char *vsFile, const char *fsFile);
void bind();
void unbind();
unsigned int id();
private:
unsigned int shader_id;
unsigned int shader_vp;
unsigned int shader_fp;
};
#endif // SHADER_H_
Shader.cpp
#include "Shader.h"
#include <cstring>
#include <iostream>
#include <fstream>
#include <cstdlib>
using namespace std;
static char* textFileRead(const char *fileName) {
char* text;
if (fileName != NULL) {
FILE *file = fopen(fileName, "rt");
if (file != NULL) {
fseek(file, 0, SEEK_END);
int count = ftell(file);
rewind(file);
if (count > 0) {
text = (char*)malloc(sizeof(char) * (count + 1));
count = fread(text, sizeof(char), count, file);
text[count] = '\0';
}
fclose(file);
}
}
return text;
}
Shader::Shader() {}
Shader::Shader(const char *vsFile, const char *fsFile)
{
init(vsFile, fsFile);
}
void Shader::init(const char *vsFile, const char *fsFile)
{
shader_vp = glCreateShader(GL_VERTEX_SHADER);
shader_fp = glCreateShader(GL_FRAGMENT_SHADER);
const char *vsText = textFileRead(vsFile);
const char *fsText = textFileRead(fsFile);
if (vsText == NULL || fsText == NULL)
{
cerr << "Either vertex shader or fragment shader file is not found" << endl;
return;
}
glShaderSource(shader_vp, 1, &vsText, 0);
glShaderSource(shader_fp, 1, &fsText, 0);
glCompileShader(shader_vp);
glCompileShader(shader_fp);
shader_id = glCreateProgram();
glAttachShader(shader_id, shader_fp);
glAttachShader(shader_id, shader_vp);
glLinkProgram(shader_id);
}
Shader::~Shader()
{
glDetachShader(shader_id, shader_fp);
glDetachShader(shader_id, shader_vp);
glDeleteShader(shader_fp);
glDeleteShader(shader_vp);
glDeleteShader(shader_id);
}
unsigned int Shader::id()
{
return shader_id;
}
void Shader::bind()
{
glUseProgram(shader_id);
}
void Shader::unbind()
{
glUseProgram(0);
}
Main.cpp
#include "Shader.h"
#include <cstdlib>
#include <iostream>
using namespace std;
Shader shader;
void init()
{
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
shader.init("shader.vert", "shader.frag");
}
void resize(int w, int h)
{
glViewport(0, 0, (GLsizei)w, (GLsizei)h);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(60, (GLfloat)w / (GLfloat)h, 1.0, 100.0);
glMatrixMode(GL_MODELVIEW);
}
int main()
{
int running = GL_TRUE;
// init GLFW
if (!glfwInit())
exit(EXIT_FAILURE);
if (!glfwOpenWindow(300, 300, 0, 0, 0, 0, 0, 0, GLFW_WINDOW))
{
glfwTerminate();
exit(EXIT_FAILURE);
}
glfwSetWindowTitle("ohhai.");
glfwSetWindowSizeCallback(resize);
/* CHECK GLEW */
GLenum err = glewInit();
if (GLEW_OK != err)
{
/* Problem: glewInit failed, something is seriously wrong. */
cout << "Error: " << glewGetErrorString(err) << endl;
}
cout << "Status: Using GLEW " << glewGetString(GLEW_VERSION) << endl;
if (!GLEW_ARB_vertex_buffer_object)
{
cerr << "VBO not supported\n";
exit(1);
}
init();
while (running)
{
glClear(GL_COLOR_BUFFER_BIT);
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glShadeModel(GL_SMOOTH);
shader.bind();
glBegin(GL_TRIANGLES);
//glColor3f(0.2f, 0.5f, 0.54f);
glVertex2f(0.0f, 0.5f);
//glColor3f(0.75f, 0.8f, 0.1f);
glVertex2f(-.5f, -.5f);
//glColor3f(0.0f, 0.9f, 0.2f);
glVertex2f(0.5f, -0.5f);
glEnd();
shader.unbind();
glfwSwapBuffers();
running = !glfwGetKey(GLFW_KEY_ESC) && glfwGetWindowParam(GLFW_OPENED);
}
glfwTerminate();
exit(EXIT_SUCCESS);
}
shader.vert
void main()
{
// set the posistion of the current matrix
gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
}
shader.frag
void main(void)
{
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
Again, this compiles fine under g++, but no triangle is shown.

I haven't found the part where you set up the camera (modelview matrix) with gluLookAt or glTranslate/glRotate/glScale. So you are using the default, which corresponds to a camera at the origin looking down -z, and your triangle (which lies in the z = 0 plane) gets clipped by the near plane.
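If you want to keep the fixed-function pipeline, one minimal way to bring the triangle in front of the camera is to translate the modelview matrix before drawing; a sketch (the -2.0 offset is an arbitrary choice that lands between the near plane at 1.0 and the far plane at 100.0 set in resize()):
// in the render loop, before shader.bind() / glBegin()
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glTranslatef(0.0f, 0.0f, -2.0f); // move the triangle past the near plane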