I am trying to debug and fix all memory leaks in my program. I have gone through the entire source code, and there is not a single call to new or malloc() that isn't matched by a corresponding delete or free(). I tried running the program under Valgrind. Valgrind reports that a consistent (across multiple executions of the program) 844 bytes are definitely lost, and it keeps pointing me back to the glewInit() call in my Window class. Is there anything I am doing wrong?
A couple of things to note:
My Window class is completely static.
My Window class calls InputManager::init(), and InputManager is also a static class.
I have another completely static class for storing constants.
class Window {
public:
    static void create(unsigned int width, unsigned int height, const std::string& name, bool resizable, bool decorated){
        if(!glfwInit()){
            Utils::log("Failed to initialize GLFW");
            return;
        }
        //Setting Window settings
        glfwWindowHint(GLFW_RED_BITS, 8);
        glfwWindowHint(GLFW_GREEN_BITS, 8);
        glfwWindowHint(GLFW_BLUE_BITS, 8);
        glfwWindowHint(GLFW_ALPHA_BITS, 8);
        glfwWindowHint(GLFW_DEPTH_BITS, 24);
        glfwWindowHint(GLFW_STENCIL_BITS, 8);
        glfwWindowHint(GLFW_DOUBLEBUFFER, GLFW_TRUE);
        glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
        glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
        glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
        glfwWindowHint(GLFW_SAMPLES, 4);
        glfwWindowHint(GLFW_RESIZABLE, resizable ? GLFW_TRUE : GLFW_FALSE);
        glfwWindowHint(GLFW_DECORATED, decorated ? GLFW_TRUE : GLFW_FALSE);
        m_width = width;
        m_height = height;
        #ifdef __APPLE__
        glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
        #endif
        //Creating the window
        window = glfwCreateWindow(width, height, name.c_str(), NULL, NULL);
        if(!window){
            Utils::log("Window: Failed to create window");
            return;
        }
        //Settings for window
        glfwSwapInterval(1);
        glfwSetFramebufferSizeCallback(window, windowResized);
        //Creating the context for opengl
        glfwMakeContextCurrent(window);
        //Initializing glew
        if(glewInit() != GLEW_OK){
            Utils::log("Window: Failed to initialize glew");
        }
        //Enabling transparency
        glEnable(GL_BLEND);
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
        //Enabling depth
        glEnable(GL_DEPTH_TEST);
        glClearDepthf(1.0f);
        //Enabling back face culling
        glEnable(GL_CULL_FACE);
        glCullFace(GL_BACK);
        //Enabling MSAA
        glEnable(GL_MULTISAMPLE);
        InputManager::init(window);
    }
    static void clear();
    static void update();
    static void close();
    //Window functions
    static void setVerticalSyncEnabled(bool enabled);
    static void setMouseCursorGrabbed(bool grabbed);
    static int getWidth();
    static int getHeight();
    static bool isResized();
    static bool isCloseRequested();
    static GLFWwindow* window;
private:
    static void windowResized(GLFWwindow* window, int width, int height);
    static int m_width;
    static int m_height;
    static bool m_isResized;
    static bool m_closeRequested;
};
#endif
I started using GLAD instead of GLEW, and the memory leak stopped.
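For reference, a minimal sketch of what the GLAD-based setup can look like, assuming the glad2-style loader (glad/gl.h) used in the window.cpp further down this page; the helper name initGL is made up:

#include <glad/gl.h>     // GLAD header first, same include order as window.h below
#include <GLFW/glfw3.h>

bool initGL(GLFWwindow* window) {
    glfwMakeContextCurrent(window);
    // glad2 loader: hand GLAD the proc-address getter provided by GLFW;
    // returns 0 on failure, otherwise the loaded GL version.
    return gladLoadGL(glfwGetProcAddress) != 0;
}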
I am getting a segfault while using GLFW and OpenGL on Arch Linux with DWM (fully updated and patched).
I traced the code and the segfault happens in glfwSwapBuffers(window).
Here is my code:
main.cpp
#include <iostream>
#include "gui/window.h"

int main(int, char**) {
    Window window("Test GL", 800, 600);
    if(!window.hasCorrectlyLoaded()) {
        return 1;
    }
    while (!window.shouldClose())
    {
        glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT);
        window.pollEvents();
    }
}
window.h
#ifndef __WINDOW_H__
#define __WINDOW_H__

#include <string>
#include <glad/gl.h>
#include <GLFW/glfw3.h>

class Window {
private:
    GLFWwindow *window;
    bool correctlyLoaded;
public:
    Window(const std::string&, int, int);
    ~Window();

    const bool hasCorrectlyLoaded();
    const bool shouldClose();
    const void pollEvents();
};

#endif // __WINDOW_H__
window.cpp
#include "window.h"
#include <spdlog/spdlog.h>
Window::Window(const std::string& title, int width, int height)
{
correctlyLoaded = false;
if(!glfwInit()) {
spdlog::default_logger()->critical("Could not load GLFW");
return;
}
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GLFW_TRUE);
GLFWwindow* window = glfwCreateWindow(width, height, title.c_str(), nullptr, nullptr);
if (!window)
{
spdlog::default_logger()->critical("Failed to create GLFW window !");
return;
}
glfwMakeContextCurrent(window);
if (!gladLoadGL(glfwGetProcAddress))
{
spdlog::default_logger()->critical("Failed to load OpenGL !");
return;
}
spdlog::default_logger()->info("Loaded OpenGL {}", glfwGetVersionString());
glViewport(0, 0, width, height);
correctlyLoaded = true;
}
const void Window::pollEvents()
{
glfwSwapBuffers(window);
glfwPollEvents(); //<- Seg fault here
}
Window::~Window()
{
glfwTerminate();
}
const bool Window::hasCorrectlyLoaded()
{
return correctlyLoaded;
}
const bool Window::shouldClose()
{
return glfwWindowShouldClose(window);
}
While researching further, I stumbled upon an answer that told me to set the glfwWindowHint(GLFW_CLIENT_API, GLFW_OPENGL_API) window hint, but I still got a segfault, only at a different place:
GLFW source code
GLFWAPI void glfwSwapBuffers(GLFWwindow* handle)
{
    _GLFWwindow* window = (_GLFWwindow*) handle;
    assert(window != NULL);

    _GLFW_REQUIRE_INIT();

    if (window->context.client == GLFW_NO_API)
    {
        _glfwInputError(GLFW_NO_WINDOW_CONTEXT,
                        "Cannot swap buffers of a window that has no OpenGL or OpenGL ES context"); //<- Seg fault without window hint
        return;
    }

    window->context.swapBuffers(window); //<- Seg fault with window hint
}
Here is the output I get from the logging:
[2022-05-24 20:01:04.252] [info] Loaded OpenGL 3.4.0 X11 GLX Null EGL OSMesa monotonic
[1] 432406 segmentation fault (core dumped) /home/lygaen/code/testgl/build/testgl
Your problem occurs in window.cpp, at this line:
//...
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GLFW_TRUE);
GLFWwindow* window = glfwCreateWindow(width, height, title.c_str(), nullptr, nullptr); //<---
if (!window)
{
//...
You've redeclared window as a local variable in this constructor, so the pointer never escapes the constructor and the class member window is left uninitialized; later calls such as glfwSwapBuffers(window) then operate on a garbage pointer.
A good habit when assigning class members is to use the this keyword. It is often redundant, but it helps indicate intent. So the code should be changed to this:
//...
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GLFW_TRUE);
this->window = glfwCreateWindow(width, height, title.c_str(), nullptr, nullptr); //<---
if (!this->window)
{
//...
If your style guidelines don't permit it, you can omit the this->; the only important part is that you're not declaring an entirely new variable that's shadowing the class member.
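Compilers can also flag this kind of mistake. Below is a minimal sketch of the shadowing pattern (not the asker's exact code); GCC warns about it with -Wshadow, MSVC reports a "declaration hides class member" warning at /W4, and Clang has comparable flags under -Wshadow-all:

struct Window {
    void* window = nullptr;        // class member

    void create() {
        void* window = nullptr;    // local variable shadows the member -> shadow warning
        (void)window;              // the member Window::window is never assigned here
    }
};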
When I attempt to generate a buffer by calling the glGenBuffer() function, no such function is found.
Some functions still work, and from what I can see most do; for instance, the following code works perfectly:
#include <iostream>
#include <GLFW/glfw3.h>

using namespace std;

class Window_Manager
{
public:
    GLFWwindow* window;

    int Create_Window(int width, int height, const char* title) {
        if (!glfwInit()) {
            return -1;
        }

        glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
        glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
        glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
        glfwWindowHint(GLFW_RESIZABLE, GL_TRUE);
        glfwWindowHint(GLFW_FOCUSED, GL_TRUE);

        window = glfwCreateWindow(width, height, title, NULL, NULL);
        if (window == NULL) {
            cout << "Failed to create a window! Aborting early..." << endl;
            Terminate_Window(window);
            return -1;
        }

        glfwMakeContextCurrent(window);
        glViewport(0, 0, width, height);
        return 1;
    }

    void Terminate_Window(GLFWwindow* window) {
        glfwSetWindowShouldClose(window, GL_TRUE);
        glfwTerminate();
    }
};
but this code does not:
#include <GLFW/glfw3.h>
#include <Engine_Manager.h>
#include <iostream>

using namespace std;

class Shape2D
{
    int VBO;

    int Setting_Up(){
        VBO = glGenBuffer();
        return 1;
    }
};
In addition to replacing glGenBuffer with glGenBuffers(1, &VBO), as rpress mentioned in the comments, OpenGL is a weird library in that you have to load most of its functions dynamically.
The exact details differ from platform to platform. On Windows, for example, you can use wglGetProcAddress to get pointers to desired functions.
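For illustration, a rough sketch of that manual route (this assumes a current OpenGL context on the calling thread and the PFNGLGENBUFFERSPROC typedef from glcorearb.h; the helper name createBufferManually is made up):

#include <windows.h>        // wglGetProcAddress
#include <GL/glcorearb.h>   // GL types and function-pointer typedefs such as PFNGLGENBUFFERSPROC

GLuint createBufferManually() {
    // Look the entry point up at run time; the result is only valid while a context is current.
    PFNGLGENBUFFERSPROC pglGenBuffers =
        (PFNGLGENBUFFERSPROC)wglGetProcAddress("glGenBuffers");
    if (!pglGenBuffers)
        return 0;

    GLuint vbo = 0;
    pglGenBuffers(1, &vbo);
    return vbo;
}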
Rather than doing it manually, you can use GLEW, an excellent library for getting OpenGL function pointers easily. Other options are documented on the OpenGL wiki.
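With GLEW the flow is roughly the sketch below, assuming a window and current context created with GLFW as in Create_Window above; note that the buffer handle should be a GLuint and the real call is glGenBuffers (plural), which fills an array of buffer names:

#include <GL/glew.h>        // GLEW's header must come before other GL headers
#include <GLFW/glfw3.h>

GLuint createVertexBuffer() {
    // glewInit() must run after glfwMakeContextCurrent(window).
    if (glewInit() != GLEW_OK)
        return 0;

    GLuint vbo = 0;
    glGenBuffers(1, &vbo);  // generates one buffer object name into vbo
    return vbo;
}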
I have an int main() function and an int Game() function, and a window defined at global scope that I use in both. When I call Game() before the GLFW loop in main, the window opens for a second and then closes. I then get
Error: The GLFW library is not initialised
printed from error_callback().
Here's my code:
#include <GLFW/glfw3.h>
#include <GL/freeglut.h>
#include <GL/GL.h>
#include <stdio.h>
#include <string>
#include <iostream>

using namespace std;

void drawText(const char *text, int length, int x, int y);
void framebuffer_size_callback(GLFWwindow* wndow, int width, int height);
void DrawCube(GLfloat centerPosX, GLfloat centerPosY, GLfloat centerPosZ, GLfloat edgeLength);
void button(GLfloat red, GLfloat green, GLfloat blue, int x, int y, int width, int height);
void error_callback(int error, const char* description);
int Game();

int width = 860, height = 490;
GLFWwindow* window;

int main(int argc, char **argv) {
    //GLTtext* text = gltCreateText();
    //gltSetText(text, "ElectroCraft");
    //GLFWwindow* window;
    int width = 860, height = 490;

    if (!glfwInit()) {
        printf("failed to init glfw");
        return -1;
    }
    glutInit(&argc, argv);

    window = glfwCreateWindow(width, height, "ElectroCraft", NULL, NULL);
    glfwMakeContextCurrent(window);
    if (!window) {
        printf("failed to start window");
        glfwTerminate();
        return -1;
    }

    Game();

    glEnable(GL_DEPTH_TEST);
    glViewport(0, 0, width, height);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, width, 0, height, 0, 1000);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

    string text;
    glfwSetErrorCallback(error_callback);

    while (!glfwWindowShouldClose(window)) {
        glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);
        glClearColor(53.0f / 255.0f, 81.0f / 255.0f, 92.0f / 255.0f, 0.0f);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        glBegin(GL_LINES);
        glVertex2f(0, height - 80);
        glVertex2f(width, height - 80);
        glEnd();

        button(192.0f / 255.0f, 192.0f / 255.0f, 192.0f / 255.0f, width / 2 - 70, height / 2, 260, 50);

        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    glfwTerminate();
    return 1;
}

void error_callback(int error, const char* description) {
    fprintf(stderr, "Error: %s\n", description);
}

int Game() {
    //glfwMakeContextCurrent(window);
    glEnable(GL_DEPTH_TEST);
    glViewport(0, 0, width, height);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, width, 0, height, 0, 1000);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

    while (!glfwWindowShouldClose) {
        glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);
        glClearColor(62.0f / 255.0f, 85.9f / 255.0f, 255.0 / 255.0, 0.0);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    glfwTerminate();
    return 1;
}
This could be because I didn't initialize the GLFW library globally, but I couldn't do that, since I got an error when I put the if statement outside of a function.
the window opens for a second and then closes
Of course, because glfwWindowShouldClose is not called as a function in Game. On its own, glfwWindowShouldClose is a (non-null) function pointer, so !glfwWindowShouldClose evaluates to false and the loop never runs:
while (!glfwWindowShouldClose) {
It has to be invoked with the window as its argument:
while (!glfwWindowShouldClose(window)) {
I then get Error: The GLFW library is not initialised
This is because GLFW is terminated by glfwTerminate() in the function Game. Delete glfwTerminate() from the function Game.
But note that once you've closed a window, you have to create a new one. An alternative is to hide a window with glfwHideWindow and show it again with glfwShowWindow.
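A minimal sketch of that alternative (the helper name setWindowVisible is made up):

#include <GLFW/glfw3.h>

// Keep the window and its context alive and just toggle visibility,
// instead of calling glfwTerminate() between game states.
void setWindowVisible(GLFWwindow* window, bool visible) {
    if (visible)
        glfwShowWindow(window);
    else
        glfwHideWindow(window);
}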
I'm using Visual Studio 2013, and as I am learning OpenGL 3.3, I thought it best to use
#define GLFW_INCLUDE_GLCOREARB
#include <GLFW/glfw3.h>
to force IntelliSense not to show old deprecated functions such as glVertex2f, etc.
However, including that #define prevents any gl* functions from showing up; even glViewport is undefined. When attempting to compile a simple application I get, among many errors,
error C3861: 'glViewport': identifier not found
glcorearb.h is in my include path, downloaded from http://www.opengl.org/registry/api/GL/glcorearb.h only yesterday.
I might be doing something completely wrong here, but here is my full source code...
// Include standard headers
#include <stdio.h>
#include <stdlib.h>

#define GLFW_INCLUDE_GLCOREARB
// Include GLFW3
#include <GLFW/glfw3.h>

//Error Callback - Outputs to STDERR
static void error_callback(int error, const char* description)
{
    fputs(description, stderr);
}

//Key Press Callback
static void key_callback(GLFWwindow* window, int key, int scancode, int action, int mods)
{
    if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
        glfwSetWindowShouldClose(window, GL_TRUE);
}

int main(){
    GLFWwindow* window;
    glfwSetErrorCallback(error_callback);

    // Initialise GLFW
    if (!glfwInit())
    {
        fputs("Failed to initialize GLFW\n", stderr);
        exit(EXIT_FAILURE);
    }

    glfwWindowHint(GLFW_SAMPLES, 2); // 2x antialiasing
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3); // We want OpenGL 3.3
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE); //We don't want the old OpenGL

    // Open a window and create its OpenGL context
    window = glfwCreateWindow(640, 480, "Test", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        exit(EXIT_FAILURE);
    }

    glfwMakeContextCurrent(window);
    glfwSetKeyCallback(window, key_callback);

    while (!glfwWindowShouldClose(window))
    {
        float ratio;
        int width, height;
        glfwGetFramebufferSize(window, &width, &height);
        ratio = width / (float)height;

        glViewport(0, 0, width, height);
        glClearColor(0.5f, 0.7f, 1.0f, 0.0f);
        glClear(GL_COLOR_BUFFER_BIT);

        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    glfwDestroyWindow(window);
    glfwTerminate();
    exit(EXIT_SUCCESS);
}
I have a problem understanding some OpenGL stuff using GLFW and GLEW.
I have 3 files, shown below:
main.cpp:
#include "gamewindow.h"
int main() {
GameWindow *gameWindow = new GameWindow(1024, 768, "FirstOpenGLGame");
/* Loop until the user closes the window */
while (gameWindow->getRunning()) {
/* Render here */
gameWindow->render();
gameWindow->update();
gameWindow->setRunning();
}
delete gameWindow;
glfwTerminate();
return 0;
}
This is where the problem is, gamewindow.cpp:
#include "gamewindow.h"
GameWindow::GameWindow(int width, int height, const char* title) : _running(true), _height(1024), _width(1024 * (16/9))
{
/* Initialize the library */
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow(width, height, title, NULL, NULL);
if(!window) {
glfwTerminate();
exit(0);
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
if(!glewInit()){ // <-- problem is this
glfwTerminate();
exit(EXIT_FAILURE);
}
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
coordSettings();
}
void GameWindow::setRunning() {
_running = !glfwWindowShouldClose(window);
}
bool GameWindow::getRunning() {
return _running;
}
void GameWindow::render() {
glClear(GL_COLOR_BUFFER_BIT);
glColor3f(1.0f, 0.0f, 0.0f);
glBegin(GL_QUADS);
glVertex2d(0.0f, 0.0f);
glVertex2d(100.0f, 0.0f);
glVertex2d(100.0f, 800.0f);
glVertex2d(0.0f, 800.0f);
glEnd();
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
void GameWindow::update() {
}
void GameWindow::coordSettings() {
glViewport( 0, 0, _width, _height );
glMatrixMode(GL_PROJECTION);
glOrtho(0.0, _width, 0.0, _height, 0.0, -1.0);
glMatrixMode(GL_MODELVIEW);
}
And finally, the header file gamewindow.h:
#ifndef GAMEWINDOW_H
#define GAMEWINDOW_H

#include <iostream>
#include <GL/glew.h>
#include <GLFW/glfw3.h>

class GameWindow
{
private:
    GLFWwindow* window;
    bool _running;
    GLfloat _width;
    GLfloat _height;

    void coordSettings();

public:
    GameWindow(int width, int height, const char* title);

    void setRunning();
    bool getRunning();
    void render();
    void update();
};

#endif // GAMEWINDOW_H
Everything works fine, but then I try to call glewInit() (without really understanding if I need to, or when I need to), and then nothing works. The program starts, but there is no window with a quad in it like before. Why is this? How is GLEW even used, and do I need it?