Access violation when using glDebugMessageCallback() with SDL? - c++

It seems that I can't use the glDebugMessageCallback function; it throws an Access violation error on the very next line of code.
ERROR: Exception thrown at 0x0000000000000000 in DEBUG.exe:
0xC0000005: Access violation executing location 0x0000000000000000.
ErrorHandler.hpp
#define GLCall(x) x;\
if(isError) __debugbreak();
static bool isError{ false };
namespace ErrorHandler {
void APIENTRY GLDebugMessageCallback(GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar* message, const void* userParam);
}
ErrorHandler.cpp
void APIENTRY ErrorHandler::GLDebugMessageCallback(GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar* message, const void* userParam)
{
isError = true;
const char* _source;
const char* _type;
const char* _severity;
switch (source) ...
switch (type) ...
switch (severity) ...
if (_severity != "NOTIFICATION") {
fprintf(stderr, "OpenGL error [%d]: %s of %s severity, raised from %s: %s\n",
id, _type, _severity, _source, message);
}
}
Game.cpp
Game::Game(const char* title, uint16_t width, uint16_t height)
{
if (SDL_Init(SDL_INIT_VIDEO) < 0) ...
m_window = SDL_CreateWindow(title, SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, width, height, SDL_WINDOW_OPENGL);
if (!m_window) ...
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_FLAGS, SDL_GL_CONTEXT_DEBUG_FLAG);
SDL_GL_SetSwapInterval(1);
glewExperimental = GL_TRUE;
if (glewInit() != GLEW_OK) ...
m_context = SDL_GL_CreateContext(m_window);
if (!m_context) ...
printf("%s\n", glGetString(GL_VERSION));
#ifdef _DEBUG
glEnable(GL_DEBUG_OUTPUT);
glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS);
glDebugMessageCallback(ErrorHandler::GLDebugMessageCallback, 0);
#endif
m_run();
}
I've tried:
Moving the glDebugMessageCallback call to different lines (directly after initializing GLEW, after creating the context).
I've tried to use another function as a callback.
I've tried to explicitly set the OpenGL version (4.6.0) and (4.4.0).
I've tried to remove any sdl flags (profile flag and version flags).
Everything gives the same result (Access violation).

You must call
SDL_GL_MakeCurrent(m_window, m_context);
to activate your OpenGL context prior to calling any OpenGL functions. Otherwise the OpenGL functions do not know which context to operate on. Presumably GLFW did that for you automatically, but SDL does not.
I'm not familiar with GLEW, but there's a good chance that glewInit also expects a valid context to be active. So the order of operations should be as follows:
m_window = SDL_CreateWindow(title, SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, width, height, SDL_WINDOW_OPENGL);
// ...
m_context = SDL_GL_CreateContext(m_window);
SDL_GL_MakeCurrent(m_window, m_context);
glewExperimental = GL_TRUE;
if (!glewInit()) // ...
printf("%s\n", glGetString(GL_VERSION));
// ...
glDebugMessageCallback(ErrorHandler::GLDebugMessageCallback, 0);

Related

Seg fault while calling glfwSwapBuffers

It seems that I am getting a segfault while using GLFW and OpenGL on Arch Linux, DWM (fully updated and patched).
I retraced the code and the segfault occurs in the glfwSwapBuffers(window) call.
Here is my code :
main.cpp
#include <iostream>
#include "gui/window.h"
int main(int, char**) {
Window window("Test GL", 800, 600);
if(!window.hasCorrectlyLoaded()) {
return 1;
}
while (!window.shouldClose())
{
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
window.pollEvents();
}
}
window.h
#ifndef __WINDOW_H__
#define __WINDOW_H__
#include <string>
#include <glad/gl.h>
#include <GLFW/glfw3.h>
class Window {
private:
GLFWwindow *window;
bool correctlyLoaded;
public:
Window(const std::string&, int, int);
~Window();
const bool hasCorrectlyLoaded();
const bool shouldClose();
const void pollEvents();
};
#endif // __WINDOW_H__
window.cpp
#include "window.h"
#include <spdlog/spdlog.h>
Window::Window(const std::string& title, int width, int height)
{
correctlyLoaded = false;
if(!glfwInit()) {
spdlog::default_logger()->critical("Could not load GLFW");
return;
}
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GLFW_TRUE);
GLFWwindow* window = glfwCreateWindow(width, height, title.c_str(), nullptr, nullptr);
if (!window)
{
spdlog::default_logger()->critical("Failed to create GLFW window !");
return;
}
glfwMakeContextCurrent(window);
if (!gladLoadGL(glfwGetProcAddress))
{
spdlog::default_logger()->critical("Failed to load OpenGL !");
return;
}
spdlog::default_logger()->info("Loaded OpenGL {}", glfwGetVersionString());
glViewport(0, 0, width, height);
correctlyLoaded = true;
}
const void Window::pollEvents()
{
glfwSwapBuffers(window);
glfwPollEvents(); //<- Seg fault here
}
Window::~Window()
{
glfwTerminate();
}
const bool Window::hasCorrectlyLoaded()
{
return correctlyLoaded;
}
const bool Window::shouldClose()
{
return glfwWindowShouldClose(window);
}
While researching further, I stumbled upon an answer that told me to set the glfwWindowHint(GLFW_CLIENT_API, GLFW_OPENGL_API) window hint, but I still got a segfault at a different place:
GLFW source code
GLFWAPI void glfwSwapBuffers(GLFWwindow* handle)
{
_GLFWwindow* window = (_GLFWwindow*) handle;
assert(window != NULL);
_GLFW_REQUIRE_INIT();
if (window->context.client == GLFW_NO_API)
{
_glfwInputError(GLFW_NO_WINDOW_CONTEXT,
"Cannot swap buffers of a window that has no OpenGL or OpenGL ES context"); //<- Seg fault without window hint
return;
}
window->context.swapBuffers(window); //<- Seg fault with window hint
}
Here is the output I get from the logging :
[2022-05-24 20:01:04.252] [info] Loaded OpenGL 3.4.0 X11 GLX Null EGL OSMesa monotonic
[1] 432406 segmentation fault (core dumped) /home/lygaen/code/testgl/build/testgl
Your problem occurs in Window.cpp, at this line:
//...
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GLFW_TRUE);
GLFWwindow* window = glfwCreateWindow(width, height, title.c_str(), nullptr, nullptr); //<---
if (!window)
{
//...
You've redeclared window as a local variable in this constructor; as a result, the pointer never escapes the constructor, and the class member is left uninitialized.
A good habit when trying to assign class members is to use the this keyword. It is often redundant, but it does help indicate intent. So the code should be changed to this:
//...
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GLFW_TRUE);
this->window = glfwCreateWindow(width, height, title.c_str(), nullptr, nullptr); //<---
if (!this->window)
{
//...
If your style guidelines don't permit it, you can omit the this->; the only important part is that you're not declaring an entirely new variable that's shadowing the class member.

glewInit() Causes memory leak according to valgrind

I am trying to debug and fix all memory leaks in my program. I have gone through the entire source code and there is not one call to new or malloc() which isn't matched by a free() or delete. I tried running the program in valgrind. Valgrind found that a consistent (throughout multiple executions of the program) 844 bytes of data were definitely lost. It also consistently points me back to the glewInit() function of my Window class. Is there anything I am doing wrong?
Couple things to note:
My window class is completely static
My window class calls InputManager::init() which is also a static class
I have another completely static class for storing constants
class Window {
public:
// void create(unsigned int width, unsigned int height, const std::string& name, bool resizable, bool decorated){
//
// }
static void create(unsigned int width, unsigned int height, const std::string& name, bool resizable, bool decorated){
if(!glfwInit()){
Utils::log("Failed to initialize GLFW");
return;
}
//Setting Window settings
glfwWindowHint(GLFW_RED_BITS, 8);
glfwWindowHint(GLFW_GREEN_BITS, 8);
glfwWindowHint(GLFW_BLUE_BITS, 8);
glfwWindowHint(GLFW_ALPHA_BITS, 8);
glfwWindowHint(GLFW_DEPTH_BITS, 24);
glfwWindowHint(GLFW_STENCIL_BITS, 8);
glfwWindowHint(GLFW_DOUBLEBUFFER, GLFW_TRUE);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_RESIZABLE, resizable ? GLFW_TRUE : GLFW_FALSE);
glfwWindowHint(GLFW_DECORATED, decorated ? GLFW_TRUE : GLFW_FALSE);
m_width = width;
m_height = height;
#ifdef __APPLE__
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
#endif
//Creating the window
window = glfwCreateWindow(width, height, name.c_str(), NULL, NULL);
if(!window){
Utils::log("Window: Failed to create window");
return;
}
//Settings for window
glfwSwapInterval(1);
glfwSetFramebufferSizeCallback(window, windowResized);
//Creating the context for opengl
glfwMakeContextCurrent(window);
//Initializing glew
if(glewInit() != GLEW_OK){
Utils::log("Window: Failed to initialize glew");
}
//Enabling transparency
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
//Enabling depth
glEnable(GL_DEPTH_TEST);
glClearDepthf(1.0f);
//Enabling back face culling
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
//Enabling MSAA
glEnable(GL_MULTISAMPLE);
InputManager::init(window);
}
static void clear();
static void update();
static void close();
//Window functions
static void setVerticalSyncEnabled(bool enabled);
static void setMouseCursorGrabbed(bool grabbed);
static int getWidth();
static int getHeight();
static bool isResized();
static bool isCloseRequested();
static GLFWwindow* window;
private:
static void windowResized(GLFWwindow* window, int width, int height);
static int m_width;
static int m_height;
static bool m_isResized;
static bool m_closeRequested;
};
#endif
I started using GLAD and it stopped causing the memory leak.

Getting Exception thrown for glViewport(0, 0, framebufferWidth, framebufferHight);

#include "list.h"
int main()
{
//INIT GLFW
glfwInit();
//CREATE WINDOW
const int WINDOW_WIDTH = 640;
const int WINDOW_HEIGHT = 480;
int framebufferWidth = 0;
int framebufferHight = 0;
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 4);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
GLFWwindow* window = glfwCreateWindow(WINDOW_WIDTH,WINDOW_HEIGHT,"Title", NULL, NULL);
glfwGetFramebufferSize(window, &framebufferWidth, &framebufferHight);
glViewport(0, 0, framebufferWidth, framebufferHight);
glfwMakeContextCurrent(window);//IMPORTIANT!!
//INIT GLEW (NEEDS WINDOW AND OPENGL CONTEXT)
glewExperimental = GL_TRUE;
>//Error
if (glewInit() != GLEW_OK)
{
std::cout << "ERROR::MAIN.CPP::GLEW_INIT_FAILED" << "\n";
glfwTerminate();
}
//MAIN LOOP
while (glfwWindowShouldClose(window))
{
//UPDATE INPUT ---
//UPDATE ---
//DRAW ---
//Clear
//Draw
//End Draw
}
//END OF PROGAM
glfwTerminate();
return 0;
}
glViewport(0, 0, framebufferWidth, framebufferHight); is giving me
Unhandled exception at 0x00007FF704D6E7D9 in OpenzGL4.exe: 0xC0000005: Access violation reading location 0x0000000000000348.
when I run it.
Any OpenGL instruction requires a valid and current OpenGL context. Hence glfwMakeContextCurrent has to be invoked before any OpenGL instruction:
GLFWwindow* window = glfwCreateWindow(WINDOW_WIDTH,WINDOW_HEIGHT,"Title", NULL, NULL);
glfwMakeContextCurrent(window); // <----- ADD
glfwGetFramebufferSize(window, &framebufferWidth, &framebufferHight);
glViewport(0, 0, framebufferWidth, framebufferHight);
glfwMakeContextCurrent(window); // <----- DELETE
In addition to what Rabbid76 already wrote in his answer, there is another problem in your code:
glViewport(0, 0, framebufferWidth, framebufferHight);
glfwMakeContextCurrent(window);//IMPORTIANT!!
//INIT GLEW (NEEDS WINDOW AND OPENGL CONTEXT)
glewExperimental = GL_TRUE;
>//Error
if (glewInit() != GLEW_OK) {
std::cout << "ERROR::MAIN.CPP::GLEW_INIT_FAILED" << "\n";
glfwTerminate(); }
Since you use the GLEW OpenGL loader, every gl...() Function name is actually remapped as a preprocessor macro to a function pointer, and glewInit will query all those function pointers (and that needs an active OpenGL context, so it can't be done before the glfwMakeContextCurrent). So it is not enough to move the glViewport after the glfwMakeContextCurrent, you must also move it after glewInit.
And there is a second issue with this code: the glewExperimental = GL_TRUE is an evil hack for a bug in GLEW 1.x with OpenGL core profiles, and its use can't be discouraged enough. Just update to GLEW 2.x or another loader which is compatible with OpenGL core profile contexts.

GLFW unable to get OpenGL 4.1 context on Mavericks

I am trying to get an OpenGL context above version 2 on mac using GLFW. My configuration is Mavericks (10.9.1) + XCode and I have an Nvidia Geforce 650M GPU with OpenGL 4.1 Full Profile potentially supported. I use the following code:
static void test_error_cb (int error, const char *description)
{
fprintf(stderr, "%d: %s\n", error, description);
}
int main(void)
{
GLFWwindow* window;
glfwSetErrorCallback(test_error_cb);
// Initialise GLFW
if (!glfwInit())
{
fprintf(stderr, "Failed to initialize GLFW\n");
exit(EXIT_FAILURE);
}
//Request Specific Version
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
// Open OpenGL window
window = glfwCreateWindow(500, 500, "Split view demo", NULL, NULL);
if (!window)
{
fprintf(stderr, "Failed to open GLFW window\n");
glfwTerminate();
exit(EXIT_FAILURE);
}
// Set callback functions
glfwSetFramebufferSizeCallback(window, framebufferSizeFun);
glfwSetWindowRefreshCallback(window, windowRefreshFun);
glfwSetCursorPosCallback(window, cursorPosFun);
glfwSetMouseButtonCallback(window, mouseButtonFun);
glfwSetKeyCallback(window, key_callback);
// Enable vsync
glfwMakeContextCurrent(window);
glfwSwapInterval(1);
glfwGetFramebufferSize(window, &width, &height);
framebufferSizeFun(window, width, height);
//Check Version
int major, minor, rev;
major = glfwGetWindowAttrib(window, GLFW_CONTEXT_VERSION_MAJOR);
minor = glfwGetWindowAttrib(window, GLFW_CONTEXT_VERSION_MINOR);
rev = glfwGetWindowAttrib(window, GLFW_CONTEXT_REVISION);
printf("OpenGL version recieved: %d.%d.%d\n", major, minor, rev);
printf("Supported OpenGL is %s\n", (const char*)glGetString(GL_VERSION));
printf("Supported GLSL is %s\n", (const char*)glGetString(GL_SHADING_LANGUAGE_VERSION));
// Main loop
for (;;)
{
// Only redraw if we need to
if (do_redraw)
{
// Draw all views
drawAllViews();
// Swap buffers
glfwSwapBuffers(window);
do_redraw = 0;
}
// Wait for new events
glfwWaitEvents();
// Check if the window should be closed
if (glfwWindowShouldClose(window))
break;
}
// Close OpenGL window and terminate GLFW
glfwTerminate();
exit(EXIT_SUCCESS);
}
Currently the glfwCreateWindow function fails. Without any hints (i.e. no glfwWindowHint calls) I can only get OpenGL 2.1 with the GLSL version at 1.20. Please advise.
A Core context is required to access GL versions greater than 2.1 on OSX.
Uncomment your GLFW_OPENGL_PROFILE hint.
Try adding this before your #include <GLFW/glfw3.h> and uncomment your profile hint.
#define GLFW_INCLUDE_GLCOREARB
Not sure why you're disabling the core profile hint. I don't have an answer, but you might get some more information via the error callback, e.g.,
extern "C"
{
static void test_error_cb (int error, const char *description)
{
fprintf(stderr, "%d: %s\n", error, description);
}
}
...
{
glfwSetErrorCallback(test_error_cb);
if (glfwInit() != GL_TRUE)
{
....
This is a function that can be used prior to glfwInit.

GLEW OpenGL Access violation when using glGenVertexArrays

In my case, this problem was fixed by updating the graphics card driver.
I have searched and found people with the same problem on stackoverflow and the internet. However, the answers aren't solving my problem.
I am using SDL2 and GLEW. When I run the application, I receive an ''Access violation'' error when this function is executed:
glGenVertexArrays(1, &VertexArrayID);
My code:
bool Game::initSDL(char* title, int xpos, int ypos, int width, int height, int flags) {
if(SDL_Init(SDL_INIT_EVERYTHING)>=0) {
Uint32 flags = SDL_WINDOW_SHOWN|SDL_WINDOW_OPENGL;
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION,4);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 0);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);
mainWindow = SDL_CreateWindow(title, xpos, ypos, width, height, flags);
mainGLContext = SDL_GL_CreateContext(mainWindow);
SDL_GL_SetSwapInterval(1);
// Initialize GLEW
glewExperimental = true; // Needed for core profile
GLenum err = glewInit();
if (GLEW_OK != err)
{
/* Problem: glewInit failed, something is seriously wrong. */
fprintf(stderr, "Error: %s\n", glewGetErrorString(err));
}
// Dark blue background
glClearColor(0.0f, 0.0f, 0.4f, 0.0f);
GLuint VertexArrayID;
glGenVertexArrays(1, &VertexArrayID);
glBindVertexArray(VertexArrayID);
} else {
return false;
}
return true;
}
Try adding glewExperimental = GL_TRUE; before the glewInit() call:
glewExperimental = GL_TRUE;
glewInit();