I am writing a game engine in C++ and OpenGL, and when I open my first test window I get this error:
Segmentation fault (core dumped)
There is no error when compiling; it builds just fine. The problem occurs when I go to open the window.
HOW TO REPRODUCE:
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <iostream>

void framebuffer_size_callback(GLFWwindow* window, int width, int height);

void framebuffer_size_callback(GLFWwindow* window, int width, int height)
{
    glViewport(0, 0, width, height);
}

int main()
{
    typedef void (*GL_GENBUFFERS) (GLsizei, GLuint*);
    GL_GENBUFFERS glGenBuffers = (GL_GENBUFFERS) glfwGetProcAddress("glGenBuffers");
    unsigned int buffer;
    glGenBuffers(1, &buffer);

    glfwInit();
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    GLFWwindow* window = glfwCreateWindow(800, 600, "BloodBunny", NULL, NULL);
    if (window == NULL)
    {
        std::cout << "Failed to create GLFW window" << std::endl;
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(window);

    if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))
    {
        std::cout << "Failed to initialize GLAD" << std::endl;
        return -1;
    }

    while (!glfwWindowShouldClose(window))
    {
        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);
    glViewport(0, 0, 800, 600);

    glfwTerminate();
    return 0;
}
glfwGetProcAddress() will only (potentially, depending on the name requested and the GL version of the current context) return usable, non-NULL function pointers if a GL context is current.
With GLFW you make a window's context current via glfwMakeContextCurrent(), so if you truly want to grab your own pointer to glGenBuffers() you need to move the glfwGetProcAddress() call to after a successful glfwMakeContextCurrent():
...
glfwMakeContextCurrent(window);
...
typedef void (*GL_GENBUFFERS) (GLsizei, GLuint*);
GL_GENBUFFERS glGenBuffers = (GL_GENBUFFERS) glfwGetProcAddress("glGenBuffers");
unsigned int buffer;
glGenBuffers(1, &buffer);
...
...though honestly there's little point, since you're calling gladLoadGLLoader() right after glfwMakeContextCurrent(), which will populate a perfectly usable glGenBuffers() pointer at global scope all on its own.
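Putting that together, here is a minimal sketch of main() with the same calls reordered: window and context first, then GLAD, then any GL calls (the callback registration and the initial glViewport are also moved ahead of the render loop so they actually take effect):

glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

GLFWwindow* window = glfwCreateWindow(800, 600, "BloodBunny", NULL, NULL);
if (window == NULL) { glfwTerminate(); return -1; }
glfwMakeContextCurrent(window);                 // a context now exists and is current

if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))
    return -1;                                  // GLAD has now loaded glGenBuffers etc.

unsigned int buffer;
glGenBuffers(1, &buffer);                       // safe: GL calls only after loading

glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);
glViewport(0, 0, 800, 600);

while (!glfwWindowShouldClose(window))
{
    glfwSwapBuffers(window);
    glfwPollEvents();
}
glfwTerminate();
return 0;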
Related
I'm seeing "ERROR 1282 in glViewport" in my terminal whne I call glViewport from inside glfwSetFramebufferSizeCallback. Not sure why, I think it should be firing from the correct context, but I'm not sure. It worked fine before I tried to move my gameloop into a thread.
Here's the code, I tried to trim out the extraneous stuff:
const int winWidth = 800;
const int winHeight = 600;

absl::Status glfwMain() {
    gl::setOpenGlVersion(4, 6, GLFW_OPENGL_CORE_PROFILE);
    ASSIGN_OR_RETURN(GLFWwindow * window, gl::createWindow(winWidth, winHeight, "mario4ever"))
    std::atomic<bool> breakLoop = false;

    auto gameLoop = std::async(std::launch::async, [&window, &breakLoop]() {
        glfwMakeContextCurrent(window);
        if (!gladLoadGLLoader((GLADloadproc) glfwGetProcAddress)) {
            return absl::InternalError("Failed to initialize GLAD");
        }
        /* snip */
        glViewport(0, 0, winWidth, winHeight);
        glfwSetFramebufferSizeCallback(window, [](GLFWwindow *window, int width, int height) {
            std::cerr << "glfwSetFramebufferSizeCallback" << std::endl;
            glViewport(0, 0, width, height); // <----- error fires here
        });
        while (!breakLoop) {
            draw(window);
        }
        /* snip */
        return absl::OkStatus();
    });

    while (!glfwWindowShouldClose(window)) {
        processInput(window);
        glfwPollEvents();
    }
    breakLoop = true;
    return gameLoop.get();
    return absl::OkStatus();
}

int main() {
    glfwInit();
    auto status = glfwMain();
    glfwTerminate();
    if (!status.ok()) {
        std::cerr << status << std::endl;
    }
    return status.raw_code();
}
Whether I put the glfwSetFramebufferSizeCallback inside the std::async or outside doesn't seem to make a difference.
What's the problem?
This tutorial says error 1282 is GL_INVALID_VALUE and according to these docs that should happen only when "either width or height is negative".
I checked the width and height that the callback is giving me and neither is negative. They're about what I'd expect.
I tried adding a mutex:
std::mutex drawMutex;

void draw(GLFWwindow *window) {
    std::lock_guard<std::mutex> lock(drawMutex);
    glClear(GL_COLOR_BUFFER_BIT);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glfwSwapBuffers(window);
}

...

// inside `std::async` callback:
glfwSetFramebufferSizeCallback(window, [](GLFWwindow *window, int width, int height) {
    std::lock_guard<std::mutex> lock(drawMutex);
    glViewport(0, 0, width, height);
});
I'm trying to compile a shader program, but I'm getting the runtime error message specified above. I've tried for a while to figure out why this is occurring and what it means, but I'm pretty stuck.
Here is the relevant source code:
Main.cpp:
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include "shader.hpp"

void initGLFW() {
    glfwInit();
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
#ifdef __APPLE__
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
#endif
}

GLFWwindow* createWindow(int width, int height) {
    GLFWwindow* window = glfwCreateWindow(width, height, "LearnOpenGL", NULL, NULL);
    if (window == NULL) {
        throw runtime_error("Failed to create GLFW window");
    }
    glfwMakeContextCurrent(window);
    glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);
    return window;
}

int main() {
    initGLFW();
    GLFWwindow* window = createWindow(WIDTH, HEIGHT);
    initGLAD();
    Shader sh {"vertex-shader.txt", "fragment-shader.txt"};
}
Shader.cpp:
unsigned int Shader::compileShader(ShaderType shaderType, const char* srcShader) {
    unsigned int shader {};
    string shaderStr {};
    if (shaderType == ShaderType::vertex) {
        shader = glCreateShader(GL_VERTEX_SHADER); // THIS IS WHERE THE RUNTIME ERROR COMES UP
        shaderStr = "Vertex";
    }
    else if (shaderType == ShaderType::fragment) {
        shader = glCreateShader(GL_FRAGMENT_SHADER);
        shaderStr = "Fragment";
    }
    glShaderSource(shader, 1, &srcShader, nullptr);
    glCompileShader(shader);

    // Check for compilation failure
    int success;
    char infoLog[512];
    glGetShaderiv(shader, GL_COMPILE_STATUS, &success);
    if (!success) {
        glGetShaderInfoLog(shader, 512, nullptr, infoLog);
        ostringstream oss {};
        oss << shaderStr << " shader compilation failed.\n" << infoLog << endl;
        throw runtime_error(oss.str());
    }
    return shader;
}

Shader::Shader(const string& vertexShaderFilepath,
               const string& fragmentShaderFilepath)
{
    string srcVertexShader {shaderFromFile(vertexShaderFilepath)};
    string srcFragmentShader {shaderFromFile(fragmentShaderFilepath)};
    const char* srcVS_cstr = srcVertexShader.c_str();
    const char* srcFS_cstr = srcFragmentShader.c_str();

    unsigned int vertexShader = compileShader(ShaderType::vertex, srcVS_cstr);
    unsigned int fragmentShader = compileShader(ShaderType::fragment, srcFS_cstr);
    shaderProgram = linkShaders(vertexShader, fragmentShader);

    glDeleteShader(vertexShader);
    glDeleteShader(fragmentShader);
}
// More stuff in shader.cpp, I only included relevant bits of code.
Shader.hpp (mostly irrelevant):
#include <string>

using namespace std;

class Shader {
public:
    Shader(const string& vertexShaderFilepath,
           const string& fragmentShaderFilepath);
    void activate();
    void activateWithUniform(const string& uniformName, float uniformVal);
    void activateWithUniform(const string& uniformName, int uniformVal);
private:
    enum class ShaderType { vertex, fragment };
    static string shaderFromFile(const string& filepath);
    static unsigned int compileShader(ShaderType shaderType, const char* srcShader);
    static unsigned int linkShaders(unsigned int shader1, unsigned int shader2);

    unsigned int shaderProgram {};
};
Here's a screenshot showing which libraries I have linked.
I should mention that the shaders are the only thing that's not working in the program - if I comment out the shader, the rest of the program outputs a window with a greenish background. Here is the full source code, but I hope the pieces of code above are enough to provide the required information.
You should choose between GLAD and GLEW, not use both: GLEW ships with its own set of OpenGL symbols (function pointers), and since you never call glewInit() they remain at 0x0. Either drop the libGLEW library or drop the GLAD code.
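If you go the GLAD-only route, the initGLAD() that the question's main() calls only needs to invoke GLAD's loader once the context is current. The body below is an assumption (the question doesn't show it), sketched to match the surrounding code in Main.cpp:

// Hypothetical body for initGLAD(); createWindow() has already made the context current.
void initGLAD() {
    if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress)) {
        throw runtime_error("Failed to initialize GLAD");
    }
}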
I want to create a shared GLFW window, but GLFW is not able to create the second window.
I am able to create a single window, but not two windows.
This is my code.
I want to use the second window in another thread so that I can share their contexts.
#include "pch.h"
#include <iostream>
#include <gl\glew.h>
#include <glfw3.h>
int SCR_WIDTH = 1920;
int SCR_HEIGHT = 1080;
int main()
{
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
// glfw window creation
// --------------------
GLFWwindow* sharedWindow = NULL;
GLFWwindow* window = glfwCreateWindow(SCR_WIDTH, SCR_HEIGHT, "LearnOpenGL", 0, sharedWindow);
if (window == NULL)
{
std::cout << "Failed to create the first GLFW window" << std::endl;
glfwTerminate();
return -1;
}
if (sharedWindow == NULL)
{
std::cout << "Failed to create the second GLFW window" << std::endl;
// glfwTerminate();
// return -1;
}
while (true)
{
}
std::cout << "Hello World!\n";
}
share is an input parameter. See glfwCreateWindow.
Create the first window and call glfwCreateWindow a 2nd time for the 2nd window:
GLFWwindow* wnd = glfwCreateWindow(SCR_WIDTH, SCR_HEIGHT, "LearnOpenGL", 0, nullptr);
GLFWwindow* wnd2 = glfwCreateWindow(SCR_WIDTH, SCR_HEIGHT, "window 2", 0, wnd);
The window can even be created in a separate thread, but note that you have to ensure the OpenGL context of the 1st window is not current when you create the 2nd window.
#include <thread>
#include <mutex>
#include <condition_variable>

GLFWwindow *wnd = nullptr;
bool wnd2created = false;
std::mutex mtx;
std::condition_variable cv;

void wnd2func( void )
{
    GLFWwindow *wnd2 = glfwCreateWindow( 800, 600, "window 2", nullptr, wnd );
    {
        std::unique_lock<std::mutex> lck(mtx);
        wnd2created = true;
        cv.notify_one();
    }
    if (wnd2 == nullptr)
        return;

    glfwMakeContextCurrent(wnd2);

    // [...]
}

int main()
{
    // [...]

    wnd = glfwCreateWindow(SCR_WIDTH, SCR_HEIGHT, "LearnOpenGL", 0, nullptr);
    if (wnd == nullptr)
        return -1;

    std::thread wnd2thread(wnd2func);
    {
        std::unique_lock<std::mutex> lck(mtx);
        cv.wait(lck, []() -> bool { return wnd2created; });
    }

    glfwMakeContextCurrent(wnd);

    // [...]
}
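One thing the elided end of main() still has to do is join (or detach) wnd2thread before it goes out of scope; destroying a joinable std::thread calls std::terminate().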
For some reason my Window::callback is being called even after the mouse has left the window. I am unable to find a solution or even anything that could help. Is it possible that GLFW updated how the mouse cursor callback operates? Or could it be an order-of-invocation problem?
Window
Window::Window(std::string title, int32_t width, int32_t height) {
    // TODO: add support for monitor and share for GLFW
    m_window = std::unique_ptr<GLFWwindow, GLFWdeleter>(glfwCreateWindow(width, height, title.c_str(), nullptr, nullptr));
    glfwMakeContextCurrent(m_window.get());
    glfwSetWindowUserPointer(m_window.get(), this);
    glfwSetCursorPosCallback(m_window.get(), Window::callback);
}

void Window::mouse_callback(double xpos, double ypos) {
    std::cout << "x: " << xpos << " y: " << ypos << std::endl;
}

void Window::callback(GLFWwindow* window, double xpos, double ypos)
{
    auto win = static_cast<Window*>(glfwGetWindowUserPointer(window));
    win->mouse_callback(xpos, ypos);
}
Engine
void startup() const
{
    if (glfwInit() == 0)
    {
        LOG(kError, "GLFW init failed!");
        exit(-1);
    }
}

void Engine::run() {
    if (m_main_window_registered)
    {
        glewExperimental = static_cast<GLboolean>(true);
        if (glewInit() != GLEW_OK)
        {
            std::cout << "Failed to initialize glew" << std::endl;
            return;
        }
    }
    while (glfwWindowShouldClose(m_main_window->window()) == 0) {
        glClear(GL_COLOR_BUFFER_BIT);
        glfwSwapBuffers(m_main_window->window());
        glfwPollEvents();
    }
}
main.cpp
int main()
{
    g_engine.startup();

    glfwWindowHint(GLFW_SAMPLES, 4);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    auto window = std::make_unique<Window>("Hello World!", 640, 480);
    //window->make_current();
    g_engine.registerWindow(std::move(window));
    g_engine.run();

    glfwTerminate();
    return 0;
}
I have figured out what the problem (or, better put, the issue) is. On Windows the callback behaves as expected: once the mouse leaves the window's area, the callback stops firing. On OSX the window never loses focus, so the cursor callback keeps being called. To fix the issue you simply have to test the coordinates to make sure the mouse is in fact inside the window.
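A minimal sketch of that check, assuming the Window class keeps its size in m_width/m_height members (those are not shown in the question):

void Window::mouse_callback(double xpos, double ypos) {
    // Ignore cursor positions reported while the pointer is outside the client
    // area; m_width/m_height are assumed members holding the window size.
    if (xpos < 0.0 || ypos < 0.0 || xpos >= m_width || ypos >= m_height)
        return;
    std::cout << "x: " << xpos << " y: " << ypos << std::endl;
}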
I'm using GLFW and GLEW, but during initialization of my object glGenBuffers throws an exception:
void Character::init2D(glm::vec3 top, glm::vec3 bottom_left, glm::vec3 bottom_right)
{
    glm::vec3 Vertices[3];
    Vertices[0] = bottom_left;
    Vertices[1] = top;
    Vertices[2] = bottom_right;

    this->top = top;
    this->left_front = bottom_left;
    this->right_front = bottom_right;

    glGenBuffers(1, &VBO); // throws an exception 0xC0000005: Access violation
    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(Vertices), Vertices, GL_STATIC_DRAW);

    CompileShaders(shaderProgram, "vertex.shader", "fragment.shader");
}
I declare my class Character like this
#include <GL\glew.h>
#include <GLFW\glfw3.h>
#include <glm\glm.hpp>
#include <glm\gtc\type_ptr.hpp>

#pragma comment(lib, "glfw3.lib")
#pragma comment(lib, "glew32.lib")

class Character
{
private:
    glm::vec3 top,
              left_front,
              right_front,
              left_back,
              right_back;
    GLuint VBO;
    GLuint shaderProgram;
public:
    Character();
    void init2D(glm::vec3 top,
                glm::vec3 bottom_left,
                glm::vec3 bottom_right);
    void draw();
    void move();
    void action();
    ~Character() {};
};
And my main.cpp looks like this
#include <iostream>
#include "character.h"

#define WIDTH 600
#define HEIGHT 600

using namespace std;

Character simple;

void render()
{
    simple.draw();
}

int main(int argc, char** argv)
{
    GLFWwindow *window;

    if (!glewInit())
        exit(EXIT_FAILURE);
    if (!glfwInit())
        exit(EXIT_FAILURE);

    window = glfwCreateWindow(WIDTH, HEIGHT, "Imensia", NULL, NULL);
    if (!window) { glfwTerminate(); exit(EXIT_FAILURE); }

    glm::vec3 left(-0.5, 0, 0);
    glm::vec3 top(0, 0.5, 0);
    glm::vec3 right(0.5, 0, 0);
    simple.init2D(top, left, right);

    glfwMakeContextCurrent(window);

    while (!glfwWindowShouldClose(window))
    {
        glViewport(0, 0, WIDTH, HEIGHT);
        glClear(GL_COLOR_BUFFER_BIT);

        glMatrixMode(GL_PROJECTION);
        glLoadIdentity();
        glOrtho(-1, 1, -1, 1, 1, -1);
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();

        render();

        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    glfwDestroyWindow(window);
    glfwTerminate();
    return 0;
}
Is it a problem with initialization, or something else?
In the project properties I have set the include and library directories...
GLEW initialization should be done after the window/OpenGL context has been created and made current.
For the given code, glewInit could be moved below glfwCreateWindow and glfwMakeContextCurrent:
int main(int argc, char** argv)
{
    GLFWwindow *window;

    if (!glfwInit())
        exit(EXIT_FAILURE);

    window = glfwCreateWindow(WIDTH, HEIGHT, "Imensia", NULL, NULL);
    if (!window) { glfwTerminate(); exit(EXIT_FAILURE); }

    glfwMakeContextCurrent(window);   // GLEW needs a current context
    if (glewInit() != GLEW_OK)
        exit(EXIT_FAILURE);
    :
    :
}
Keugyeol's approach helped me fix the problem. If you follow the tutorial mentioned in GLFW's docs, this may help you.
int main(int argc, char** argv) {
    GLFWwindow* window;

    if (!glfwInit()) {
        return -1;
    }

    window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
    if (!window) {
        glfwTerminate();
        return -1;
    }

    glfwMakeContextCurrent(window);

    if (glewInit() != GLEW_OK) {
        return -1;
    }

    /* your code
     *
     */
}