What I'm trying to do is make it so that I can replace the window I'm rendering to with a new window; this could happen because the user switches screens, switches from fullscreen to windowed, or for any number of other reasons.
My code so far looks like this:
"Context.h"
#include <GLFW/glfw3.h>
#include <memory>

struct window_deleter {
    void operator()(GLFWwindow * window) const;
};

class context {
    std::unique_ptr<GLFWwindow, window_deleter> window;
public:
    context(int width, int height, const char * s, GLFWmonitor * monitor, GLFWwindow * old_window, bool borderless);
    GLFWwindow * get_window() const;
    void make_current() const;
};
"Context.cpp"
#include "Context.h"
#include <stdexcept>

context::context(int width, int height, const char * s, GLFWmonitor * monitor, GLFWwindow * old_window, bool borderless) {
    if (!glfwInit()) throw std::runtime_error("Unable to Initialize GLFW");
    if (borderless) glfwWindowHint(GLFW_DECORATED, 0);
    else glfwWindowHint(GLFW_DECORATED, 1);
    window.reset(glfwCreateWindow(width, height, s, monitor, old_window));
    if (!window) throw std::runtime_error("Unable to Create Window");
    make_current();
}

GLFWwindow * context::get_window() const {
    return window.get();
}

void context::make_current() const {
    glfwMakeContextCurrent(window.get());
}
"WindowManager.h"
#include "Context.h"
class window_style;
/* window_style is basically a really fancy "enum class", and I don't
* believe its implementation or interface are relevant to this project.
* I'll add it if knowing how it works is super critical.
*/
class window_manager {
context c_context;
uint32_t c_width, c_height;
std::string c_title;
window_style c_style;
std::function<bool()> close_test;
std::function<void()> poll_task;
public:
static GLFWmonitor * get_monitor(window_style style);
window_manager(uint32_t width, uint32_t height, std::string const& title, window_style style);
context & get_context();
const context & get_context() const;
bool resize(uint32_t width, uint32_t height, std::string const& title, window_style style);
std::function<bool()> get_default_close_test();
void set_close_test(std::function<bool()> const& test);
std::function<void()> get_default_poll_task();
void set_poll_task(std::function<void()> const& task);
void poll_loop();
};
"WindowManager.cpp"
#include "WindowManager.h"
#include <stdexcept>

GLFWmonitor * window_manager::get_monitor(window_style style) {
    if (style.type != window_style::style_type::fullscreen) return nullptr;
    if (!glfwInit()) throw std::runtime_error("Unable to initialize GLFW");
    int count;
    GLFWmonitor ** monitors = glfwGetMonitors(&count);
    if (style.monitor_number >= uint32_t(count)) throw invalid_monitor_exception{};
    return monitors[style.monitor_number];
}

std::function<bool()> window_manager::get_default_close_test() {
    return [&] { return glfwWindowShouldClose(c_context.get_window()) != 0; };
}

window_manager::window_manager(uint32_t width, uint32_t height, std::string const& title, window_style style) :
    c_context(int(width), int(height), title.c_str(), get_monitor(style), nullptr, style.type == window_style::style_type::borderless),
    c_width(width), c_height(height), c_title(title), c_style(style), close_test(get_default_close_test()), poll_task(get_default_poll_task()) {
}

context & window_manager::get_context() {
    return c_context;
}

const context & window_manager::get_context() const {
    return c_context;
}

bool window_manager::resize(uint32_t width, uint32_t height, std::string const& title, window_style style) {
    if (width == c_width && height == c_height && title == c_title && style == c_style) return false;
    c_width = width;
    c_height = height;
    c_title = title;
    c_style = style;
    c_context = context(int(width), int(height), title.c_str(), get_monitor(style), get_context().get_window(), style.type == window_style::style_type::borderless);
    return true;
}

void window_manager::set_close_test(std::function<bool()> const& test) {
    close_test = test;
}

std::function<void()> window_manager::get_default_poll_task() {
    return [&] { glfwSwapBuffers(c_context.get_window()); };
}

void window_manager::set_poll_task(std::function<void()> const& task) {
    poll_task = task;
}

void window_manager::poll_loop() {
    while (!close_test()) {
        glfwPollEvents();
        poll_task();
    }
}
"Main.cpp"
#include "WindowManager.h"
// plus the glbinding, glm, and project headers (gl_backend, glresource) used below
#include <iostream>
#include <fstream>
#include <cstdlib>

int main() {
    try {
        glfwInit();
        const GLFWvidmode * vid_mode = glfwGetVideoMode(glfwGetPrimaryMonitor());
        gl_backend::window_manager window(vid_mode->width, vid_mode->height, "First test of the window manager", gl_backend::window_style::fullscreen(0));
        glfwSetKeyCallback(window.get_context().get_window(), [](GLFWwindow * window, int, int, int, int) { glfwSetWindowShouldClose(window, 1); });
        glbinding::Binding::initialize();
        //Anything with a "glresource" prefix is basically just a std::shared_ptr<GLuint>
        //with some extra deletion code added.
        glresource::vertex_array vao;
        glresource::buffer square;
        float data[] = {
            -.5f, -.5f,
             .5f, -.5f,
             .5f,  .5f,
            -.5f,  .5f
        };
        gl::glBindVertexArray(*vao);
        gl::glBindBuffer(gl::GL_ARRAY_BUFFER, *square);
        gl::glBufferData(gl::GL_ARRAY_BUFFER, sizeof(data), data, gl::GL_STATIC_DRAW);
        gl::glEnableVertexAttribArray(0);
        gl::glVertexAttribPointer(0, 2, gl::GL_FLOAT, false, 2 * sizeof(float), nullptr);
        std::string vert_src =
            "#version 430\n"
            "layout(location = 0) in vec2 vertices;"
            "void main() {"
            "gl_Position = vec4(vertices, 0, 1);"
            "}";
        std::string frag_src =
            "#version 430\n"
            "uniform vec4 square_color;"
            "out vec4 fragment_color;"
            "void main() {"
            "fragment_color = square_color;"
            "}";
        glresource::shader vert(gl::GL_VERTEX_SHADER, vert_src);
        glresource::shader frag(gl::GL_FRAGMENT_SHADER, frag_src);
        glresource::program program({ vert, frag });
        window.set_poll_task([&] {
            gl::glUseProgram(*program);
            gl::glBindVertexArray(*vao);
            glm::vec4 color{ (glm::sin(float(glfwGetTime())) + 1) / 2, 0.f, 0.5f, 1.f };
            gl::glUniform4fv(gl::glGetUniformLocation(*program, "square_color"), 1, glm::value_ptr(color));
            gl::glDrawArrays(gl::GL_QUADS, 0, 4);
            glfwSwapBuffers(window.get_context().get_window());
        });
        window.poll_loop();
        window.resize(vid_mode->width, vid_mode->height, "Second test of the window manager", gl_backend::window_style::fullscreen(1));
        glfwSetKeyCallback(window.get_context().get_window(), [](GLFWwindow * window, int, int, int, int) { glfwSetWindowShouldClose(window, 1); });
        window.poll_loop();
    }
    catch (std::exception const& e) {
        std::cerr << e.what() << std::endl;
        std::ofstream error_log("error.log");
        error_log << e.what() << std::endl;
        system("pause");
    }
    return 0;
}
So the current version of the code is supposed to do the following:
1. Display a fullscreen window on the primary monitor.
2. On this monitor, display a "square" (rectangle, really....) that over time transitions between magenta and blue, while the background transitions between magenta and a green-ish color.
3. When the user presses a key, create a new fullscreen window on the second monitor, using the first window's context to feed into GLFW's window creation, and destroy the original window (in that order).
4. Display the same rectangle on this second window.
5. Continue to transition the background periodically.
6. When the user presses a key again, destroy the second window and exit the program.
Of these steps, step 4 doesn't work at all, and step 3 partially works: the window does get created, but it doesn't display by default, and the user has to call it up via the taskbar. All the other steps work as expected, including the transitioning background on both windows.
So my assumption is that something is going wrong with respect to the object sharing between contexts; specifically, it doesn't appear that the second context I'm creating is receiving the objects created by the first context. Is there an obvious logic error I'm making? Should I be doing something else to ensure that context sharing works as intended? Is it possible that there's just a bug in GLFW?
So my assumption is that something is going wrong with respect to the object sharing between contexts; specifically, it doesn't appear that the second context I'm creating is receiving the objects created by the first context. Is there an obvious logic error I'm making?
Yes, your premise is just wrong. Shared OpenGL contexts do not share the whole state, just the "big" objects which actually hold user-specific data (like VBOs, textures, shaders and programs, renderbuffers, and so on), not the ones which only reference them; state containers like VAOs, FBOs and so on are never shared.
Should I be doing something else to ensure that context sharing works as intended?
Well, if you really want to go that route, you have to re-build all those state containers, and also restore the global state (all those glEnables, the depth buffer setting, blending state, tons of other things) of your original context.
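For illustration, here is a minimal sketch (not your actual code) of what re-creating one such container in the new context might look like, assuming the glbinding-style gl:: calls from your snippets and a hypothetical shared_vbo that was created in the old context:

// After making the new (shared) context current: the buffer object survives
// the switch because buffers are shared, but the VAO has to be built again.
gl::GLuint rebuild_vao(gl::GLuint shared_vbo) {
    gl::GLuint vao = 0;
    gl::glGenVertexArrays(1, &vao);                    // new VAO, owned by the new context
    gl::glBindVertexArray(vao);
    gl::glBindBuffer(gl::GL_ARRAY_BUFFER, shared_vbo); // same id still works: shared object
    gl::glEnableVertexAttribArray(0);
    gl::glVertexAttribPointer(0, 2, gl::GL_FLOAT, false, 2 * sizeof(float), nullptr);
    return vao;
}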
However, I find your whole concept doubtful here. You do not need to destroy a window when going from fullscreen to windowed, or to a different monitor on the same GPU, and GLFW directly supports that via glfwSetWindowMonitor().
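A rough sketch of what that looks like (window, monitor, and the saved geometry are placeholders):

// Move the existing window (and its intact GL context) onto a monitor
// for fullscreen rendering...
const GLFWvidmode * mode = glfwGetVideoMode(monitor);
glfwSetWindowMonitor(window, monitor, 0, 0, mode->width, mode->height, mode->refreshRate);
// ...and back to a decorated window at the previously saved position and size:
glfwSetWindowMonitor(window, nullptr, saved_x, saved_y, saved_width, saved_height, 0);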
And even if you do re-create a window, this does not imply that you have to re-create the GL context. There might be some restrictions imposed by GLFW's API in that regard, but the underlying concepts are separate. You basically can make the old context current in the new window, and are just done with it. GLFW just inseparably links window and context together, which is kind of an unfortunate abstraction.
However, the only scenario I could imagine where re-creating the window would be necessary is one where different screens are driven by different GPUs; but GL context sharing won't work across different GL implementations, so even in that scenario, you would have to rebuild the whole context state.
Rewriting this to try to provide some clarity and update the code with some things that have changed.
I am restructuring a project that used SDL2, and have encountered issues trying to create a blank window. I have attempted to structure the project similarly to the original by separating all functionality dealing with SDL_Window into its own class. If I move the call to SDL_CreateWindow into the same class as the event loop, or move the event loop into the same class as the window, the window is created and shown as expected. As it is now, however, the window appears to be created successfully (SDL_CreateWindow is not returning NULL) and the program doesn't seem to be hanging, but no window is displayed while the program is running.
The SDL_Window is created in the Graphics class and stored in a member variable:
Graphics::Graphics(const char* title, unsigned int w, unsigned int h, unsigned int flags, int& status) {
    screen = SDL_CreateWindow(title,
                              SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
                              w, h,
                              flags);
    status = 0;
    if (screen == NULL)
        status = 1;
}
Graphics is instantiated in the Window class and stored in a member variable.
Window::Window(const char* title, unsigned int w, unsigned int h, unsigned int flags, int& status) {
    g = Graphics(title, w, h, flags, status);
}
Window is instantiated in main, and if the window is created successfully, it starts the event loop.
{
    int status;
    Window window("Mirari", 640, 480, SDL_WINDOW_SHOWN, status);
    if (status == 0) {
        window.eventLoop();
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't create window and renderer: %s", SDL_GetError());
        return 1;
    }
}
The event loop itself, to be thorough (update and draw are both currently empty functions):
void Window::eventLoop() {
    SDL_Event ev;
    bool running = true;
    while (running) {
        const int start_time = SDL_GetTicks();
        while (SDL_PollEvent(&ev)) {
            switch (ev.type) {
                case SDL_QUIT:
                    running = false;
                    break;
                default:
                    break;
            }
        }
        //update();
        //draw();
        std::cout << "." << std::endl;
        const int elapsed = SDL_GetTicks() - start_time;
        if (elapsed < 1000 / FPS)
            SDL_Delay(1000 / FPS - elapsed);
    }
}
SDL is initialized with this static function and these flags.
void Window::init(unsigned int sdl_flags, IMG_InitFlags img_flags) {
    SDL_Init(sdl_flags);
    IMG_Init(img_flags);
    TTF_Init();
}
...
Window::init(SDL_INIT_VIDEO | SDL_INIT_EVENTS | SDL_INIT_TIMER, IMG_INIT_PNG);
I know that a window can be created in a separate class, because the first version of this project did that and it worked; I'm just not sure what has changed that is causing the window not to show up.
As said by Some programmer dude, your design is not perfect and should be thought through again.
Nevertheless, from what we can see of your code: if the Window constructor is called (and SDL_Init was called before it, which I assume), then the window should be created.
From there we can only guess at what we can't see (as it's not part of what you are displaying):
- is the definition of SDL_WINDOWPOS_UNDEFINED the same in both contexts?
- is the screen variable definition the same in both contexts?
- is screen used in the update or draw method and, being uninitialized, failing there?
- ...?
As you are probably new to development, I suggest you adopt this habit very early: your code should check and report everything it does. A good program is easy to debug, as it tells you what's wrong.
For instance, just after:
screen = SDL_CreateWindow(title, SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
                          w, h, flags);
you may want to write something like:
if (!screen)
{
    std::cout << "Failed to create window\n";
    return -1;
}
or better:
if (!screen)
{
    throw std::runtime_error("Failed to create window");
}
(Note that std::exception has no string constructor in standard C++; use std::runtime_error to carry a message.)
And so on.
For instance, in your update function, you may want to have something like:
if (!screen)
{
    throw std::runtime_error("Unable to update the display as it is uninitialized");
}
I assume your application would then not end without any comment... but that's a guess.
Sorry for the long title, but I have a very specific problem that can't really be expressed any more concisely. I'm programming a game engine (GitHub link: here) and I'm trying to let the client create windows on top of the main window which the application supplies automatically.
I've completely managed to get this to work but I'm bothered with the framerate of the main window when it enters full-screen mode (either on initialization or when the user presses alt+enter). I haven't benchmarked the performance, but it is visibly bad (so probably around 20-30 FPS) and the performance only drops when the user creates another window (it doesn't even have to be showing).
Since all of the windows the user creates are children of the main window, I have to hide them before entering full-screen mode.
I have a lot of code in my window class (over 1000 lines), so giving you a minimal example will be very difficult. If you must see the code, please visit the GitHub repo (under platform/windows you will find the code I'm referencing). I wonder if this is a strange artifact of having multiple windows open in the same process, or if I'm just missing some code.
That being said, here is some actual client code:
SandboxApp.h
#pragma once

#include <Infinity.h>

class SandboxApp : public Infinity::Application
{
private:
    Infinity::Window *m_popup_window;
    Infinity::Rasterizer *m_rasterizer;

    Infinity::OrthoCamera m_camera;
    Infinity::Renderer2D m_renderer;
    Infinity::Texture2D *m_texture;

    float m_aspect_ratio;

public:
    SandboxApp();
    ~SandboxApp();

    void OnApplicationEntered(Infinity::ApplicationEnteredEvent *event) override;
    void OnUserCreate(Infinity::UserCreateEvent *event) override;
    void OnUserUpdate(Infinity::UserUpdateEvent *event) override;
    void OnUserRender(Infinity::UserRenderEvent *event) override;
    void OnUserDestroy(Infinity::UserDestroyEvent *event) override;
    void OnWindowResized(Infinity::WindowResizedEvent *event) override;

    void Exit(const char *message);
};
SandboxApp.cpp
#define INFINITY_ENTRY_POINT
#include "SandboxApp.h"

SandboxApp::SandboxApp():
    m_popup_window(nullptr),
    m_rasterizer(nullptr),
    m_renderer(),
    m_texture(),
    m_aspect_ratio(),
    m_camera()
{}

SandboxApp::~SandboxApp()
{}

void SandboxApp::Exit(const char *message)
{
    INFINITY_CLIENT_ERROR(message);
    RequestExit();
}

void SandboxApp::OnApplicationEntered(Infinity::ApplicationEnteredEvent *event)
{
    Infinity::Window::WindowParams &params = event->GetMainWindowParams();
    params.fullscreen = true;
    params.auto_show = false;
}
void SandboxApp::OnUserCreate(Infinity::UserCreateEvent *event)
{
    Infinity::Window *window = GetMainWindow();

    m_popup_window = Infinity::Window::CreateWindow();

    Infinity::Window::WindowParams window_params;
    window_params.width = 300;
    window_params.height = 300;
    window_params.title = "Popup window!";

    if (!m_popup_window->Init(window_params))
    {
        Exit("Error initializing popup window");
        return;
    }

    // Set clear color
    Infinity::Context *context = Infinity::Window::GetContext();
    context->SetClearColor(0.0f, 0.0f, 0.0f, 1.0f);

    m_popup_window->MakeContextCurrent();
    context = Infinity::Window::GetContext();
    context->SetClearColor(0.0f, 0.0f, 1.0f, 1.0f);

    window->MakeContextCurrent();

    // Initialize other resources
    m_rasterizer = Infinity::Rasterizer::CreateRasterizer();
    if (!m_rasterizer->Init(Infinity::Rasterizer::CullMode::NONE, true))
    {
        Exit("Error initializing rasterizer");
        return;
    }
    m_rasterizer->Bind();

    if (!m_renderer.Init())
    {
        Exit("Error initializing Renderer2D");
        return;
    }

    m_texture = Infinity::Texture2D::CreateTexture();
    if (!m_texture->Init("assets/image.png"))
    {
        Exit("Error initializing texture");
        return;
    }

    INFINITY_CLIENT_INFO("Client created");

    window->Show();
    m_popup_window->Show();

    event->Consume();
}
void SandboxApp::OnUserUpdate(Infinity::UserUpdateEvent *event)
{
    Infinity::Window *window = GetMainWindow();

    if (KeyPressed(Infinity::KeyCode::Escape))
    {
        if (window->CursorEnabled())
        {
            window->DisableCursor();
        }
        else
        {
            window->EnableCursor();
        }
    }

    if (window->CursorEnabled())
    {
        event->Consume();
        return;
    }

    float speed = (float)(3.0 * event->GetDT());
    float r_speed = (float)(2.0 * event->GetDT());
    float z_speed = (float)(1.0 * event->GetDT());

    if (KeyDown(Infinity::KeyCode::Left))  { m_camera.MoveLeft(speed); }
    if (KeyDown(Infinity::KeyCode::Right)) { m_camera.MoveRight(speed); }
    if (KeyDown(Infinity::KeyCode::Down))  { m_camera.MoveBackward(speed); }
    if (KeyDown(Infinity::KeyCode::Up))    { m_camera.MoveForward(speed); }

    if (KeyDown(Infinity::KeyCode::W)) { m_camera.zoom += z_speed; }
    if (KeyDown(Infinity::KeyCode::S)) { m_camera.zoom -= z_speed; }

    if (KeyDown(Infinity::KeyCode::A)) { m_camera.roll -= r_speed; }
    if (KeyDown(Infinity::KeyCode::D)) { m_camera.roll += r_speed; }

    m_camera.Update(m_aspect_ratio);

    event->Consume();
}
void SandboxApp::OnUserRender(Infinity::UserRenderEvent *event)
{
    Infinity::Window *window = GetMainWindow();
    window->MakeContextCurrent();

    Infinity::Context *context = Infinity::Window::GetContext();
    context->Clear();

    m_renderer.StartScene(&m_camera);

    Infinity::Renderer2D::QuadParams quad;
    quad.position = { 0.0f, 0.0f };
    quad.size = { 1.0f, 1.0f };
    quad.color = { 1.0f, 0.0f, 0.0f, 1.0f };

    m_renderer.DrawQuad(quad);
    m_renderer.EndScene();

    m_popup_window->MakeContextCurrent();
    context = Infinity::Window::GetContext();
    context->Clear();

    window->MakeContextCurrent();

    event->Consume();
}
void SandboxApp::OnUserDestroy(Infinity::UserDestroyEvent *event)
{
    m_renderer.Destroy();

    if (m_rasterizer)
    {
        m_rasterizer->Destroy();
        delete m_rasterizer;
    }

    if (m_texture)
    {
        m_texture->Destroy();
        delete m_texture;
    }

    if (m_popup_window)
    {
        m_popup_window->Destroy();
        delete m_popup_window;
    }

    INFINITY_CLIENT_INFO("Client destroyed");

    event->Consume();
}
void SandboxApp::OnWindowResized(Infinity::WindowResizedEvent *event)
{
    if (event->GetWindow() == GetMainWindow())
    {
        m_aspect_ratio = (float)event->GetWidth() / (float)event->GetHeight();
        m_camera.Update(m_aspect_ratio);

        event->Consume();
    }
}

Infinity::Application *Infinity::CreateApplication()
{
    return new SandboxApp;
}
If you need any other information, please just leave a comment.
Thanks in advance! :)
Update
I tried adding my executables to the Graphics Performance options list but it didn't change the low framerate of the full-screen window.
I did some more testing and found out that I only need to create the sub-window for these inefficiencies to occur. Even if I don't show, update, or render to the window, simply creating it slows down the frame rate of my full-screen main window.
Trying to do more research, I realized that MSDN does not have any documentation on using multiple DXGI swap chains. My hunch is that setting the full-screen state of one swap chain to true somehow interferes with the other swap chain, causing inefficiencies (although my ID3D11Device debug output doesn't mention inefficiencies anywhere).
There are two kinds of full-screen in Windows.
True fullscreen is probably what you're doing, calling IDXGISwapChain::SetFullscreenState. In this mode, the Windows desktop compositor, dwm.exe, is disabled, giving your app exclusive access to the monitor. However, as you found out, it has complications. There are more of them, BTW: alt+tab can be slow, popups from other applications may glitch, and it's also tricky to implement correctly; I think you have to re-create the swap chain and all render targets. On the bright side, in some rare cases it may improve FPS by a single-digit number.
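(For reference, this mode is toggled on the swap chain itself; a minimal sketch, assuming swapChain is your IDXGISwapChain pointer:)

// Enter or leave exclusive fullscreen on the output the window is on.
// Afterwards you typically have to resize the buffers and re-create the
// render target views, which is part of the complexity mentioned above.
swapChain->SetFullscreenState( TRUE, nullptr );  // enter
swapChain->SetFullscreenState( FALSE, nullptr ); // leave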
Borderless windowed is what you should be doing since you want multiple Win32 windows running along with your fullscreen content. In this mode the desktop compositor is up and running, updating your window along others. Switching in and out that mode is also way easier, example below.
HRESULT WindowImpl::maximizeBorderless( bool max )
{
    // https://devblogs.microsoft.com/oldnewthing/20100412-00/?p=14353
    DWORD dwStyle = GetWindowLong( m_hWnd, GWL_STYLE );
    if( max )
    {
        MONITORINFO mi = { sizeof( mi ) };
        if( GetWindowPlacement( m_hWnd, &m_wpPrev ) && GetMonitorInfo( MonitorFromWindow( m_hWnd, MONITOR_DEFAULTTOPRIMARY ), &mi ) )
        {
            SetWindowLong( m_hWnd, GWL_STYLE, dwStyle & ~WS_OVERLAPPEDWINDOW );
            SetWindowPos( m_hWnd, HWND_TOP,
                mi.rcMonitor.left, mi.rcMonitor.top,
                mi.rcMonitor.right - mi.rcMonitor.left,
                mi.rcMonitor.bottom - mi.rcMonitor.top,
                SWP_NOOWNERZORDER | SWP_FRAMECHANGED );
        }
    }
    else
    {
        SetWindowLong( m_hWnd, GWL_STYLE, dwStyle | WS_OVERLAPPEDWINDOW );
        SetWindowPlacement( m_hWnd, &m_wpPrev );
        SetWindowPos( m_hWnd, NULL, 0, 0, 0, 0,
            SWP_NOMOVE | SWP_NOSIZE | SWP_NOZORDER |
            SWP_NOOWNERZORDER | SWP_FRAMECHANGED );
    }
    return S_OK;
}
I am in the process of learning OpenGL on Linux, but I can't get mode switching working (windowed to full screen and back).
The window appears to be going into full screen mode but does not look correct. To switch modes, a new window is being created and the old one destroyed.
void OpenGLWindow::FullScreen(bool fullScreen, int width, int height)
{
    GLFWwindow *oldHandle = m_window;
    m_fullscreen = fullScreen;
    m_width = width;
    m_height = height;

    m_window = glfwCreateWindow(width, height, m_caption.c_str(),
                                fullScreen ? m_monitor : NULL, m_window);
    if (m_window == NULL)
    {
        glfwTerminate();
        throw std::runtime_error("Failed to recreate window.");
    }
    glfwDestroyWindow(oldHandle);

    m_camera->Invalidate();

    // Use entire window for rendering.
    glViewport(0, 0, width, height);

    glfwMakeContextCurrent(m_window);
    glfwSwapInterval(1);

    if (m_keyboardHandler) SetKeyboardHandler(m_keyboardHandler);
}
[Screenshots: initial window; full screen (incorrect); return to windowed.]
Updates to Question
I have updated the code to use your version, and I am getting the same issue. On your suggestion I am now updating the camera, but again to no avail :(
void OpenGLCamera::Invalidate()
{
    RecalculateProjection(m_perspProjInfo->Width(), m_perspProjInfo->Height());
    m_recalculateViewMatrix = true;
    m_recalculatePerspectiveMatrix = true;
    m_recalculateProjectionMatrix = true;
}

void OpenGLCamera::RecalculateProjection(int width, int height)
{
    float aspectRatio = float(width) / height;
    float frustumYScale = cotangent(degreesToRadians(m_perspProjInfo->FieldOfView() / 2));
    float frustumXScale = frustumYScale;

    if (width > height)
    {
        // Shrink the x scale in eye-coordinate space, so that when geometry is
        // projected to ndc-space, it is widened out to become square.
        m_projectionMatrix[0][0] = frustumXScale / aspectRatio;
        m_projectionMatrix[1][1] = frustumYScale;
    }
    else {
        // Shrink the y scale in eye-coordinate space, so that when geometry is
        // projected to ndc-space, it is widened out to become square.
        m_projectionMatrix[0][0] = frustumXScale;
        m_projectionMatrix[1][1] = frustumYScale * aspectRatio;
    }
}
[Screenshots for Rabbid76: when resizing; when going to full screen.]
In the following, I'll describe a small but handy class which deals with resizing a GLFW window and handles switching a fullscreen window on and off.
All the used GLFW functions are well documented in the GLFW documentation.
#include <GL/gl.h>
#include <GLFW/glfw3.h>

#include <array>
#include <stdexcept>

class OpenGLWindow
{
private:
    std::array< int, 2 > _wndPos         {0, 0};
    std::array< int, 2 > _wndSize        {0, 0};
    std::array< int, 2 > _vpSize         {0, 0};
    bool                 _updateViewport = true;
    GLFWwindow *         _wnd            = nullptr;
    GLFWmonitor *        _monitor        = nullptr;

    void Resize( int cx, int cy );

public:
    void Init( int width, int height );
    static void CallbackResize(GLFWwindow* window, int cx, int cy);
    void MainLoop ( void );
    bool IsFullscreen( void );
    void SetFullScreen( bool fullscreen );
};
When the window is created, the user function pointer (glfwSetWindowUserPointer) is set to the window management class, and the resize callback is set by glfwSetWindowSizeCallback. After the window is created, its current size and position can be retrieved by glfwGetWindowPos and glfwGetWindowSize.
void OpenGLWindow::Init( int width, int height )
{
    _wnd = glfwCreateWindow( width, height, "OGL window", nullptr, nullptr );
    if ( _wnd == nullptr )
    {
        glfwTerminate();
        throw std::runtime_error( "error initializing window" );
    }

    glfwMakeContextCurrent( _wnd );
    glfwSetWindowUserPointer( _wnd, this );
    glfwSetWindowSizeCallback( _wnd, OpenGLWindow::CallbackResize );

    _monitor = glfwGetPrimaryMonitor();
    glfwGetWindowSize( _wnd, &_wndSize[0], &_wndSize[1] );
    glfwGetWindowPos( _wnd, &_wndPos[0], &_wndPos[1] );
    _updateViewport = true;
}
When the resize notification occurs, the pointer to the window management class can be retrieved by glfwGetWindowUserPointer:
void OpenGLWindow::CallbackResize(GLFWwindow* window, int cx, int cy)
{
    void *ptr = glfwGetWindowUserPointer( window );
    if ( OpenGLWindow *wndPtr = static_cast<OpenGLWindow*>( ptr ) )
        wndPtr->Resize( cx, cy );
}
Any change of the window size is thus noted; the callback itself just flags that the viewport has to be updated:
void OpenGLWindow::Resize( int cx, int cy )
{
    _updateViewport = true;
}
When the window size has changed, the viewport has to be adjusted to the new size (glViewport). This can be done in the main loop of the application:
void OpenGLWindow::MainLoop ( void )
{
    while (!glfwWindowShouldClose(_wnd))
    {
        if ( _updateViewport )
        {
            glfwGetFramebufferSize( _wnd, &_vpSize[0], &_vpSize[1] );
            glViewport( 0, 0, _vpSize[0], _vpSize[1] );
            _updateViewport = false;
        }

        // ..... render the scene

        glfwSwapBuffers(_wnd);
        glfwPollEvents();
    }
}
Whether the current window is in full screen mode can be determined by asking for the monitor that the window uses for full screen mode (glfwGetWindowMonitor):
bool OpenGLWindow::IsFullscreen( void )
{
    return glfwGetWindowMonitor( _wnd ) != nullptr;
}
To switch the full screen mode on and off, glfwSetWindowMonitor has to be called, either with the monitor for the full screen mode, or with nullptr:
void OpenGLWindow::SetFullScreen( bool fullscreen )
{
    if ( IsFullscreen() == fullscreen )
        return;

    if ( fullscreen )
    {
        // backup window position and window size
        glfwGetWindowPos( _wnd, &_wndPos[0], &_wndPos[1] );
        glfwGetWindowSize( _wnd, &_wndSize[0], &_wndSize[1] );

        // get resolution of monitor
        const GLFWvidmode * mode = glfwGetVideoMode(_monitor);

        // switch to full screen
        glfwSetWindowMonitor( _wnd, _monitor, 0, 0, mode->width, mode->height, 0 );
    }
    else
    {
        // restore last window size and position
        glfwSetWindowMonitor( _wnd, nullptr, _wndPos[0], _wndPos[1], _wndSize[0], _wndSize[1], 0 );
    }

    _updateViewport = true;
}
I recommend you to not create a new Window with glfwCreateWindow when you just want to switch between windowed and fullscreen. Use glfwSetWindowMonitor instead.
When you create a window with fullscreen enabled, you have to pass arguments which are compatible with a video mode on the monitor. You can get the standard video mode on the primary monitor like this:
GLFWmonitor *monitor = glfwGetPrimaryMonitor();
const GLFWvidmode *mode = glfwGetVideoMode(monitor);
and to switch to fullscreen:
glfwSetWindowMonitor(window, monitor, 0, 0, mode->width, mode->height, mode->refreshRate);
To switch back to windowed mode, just pass a nullptr monitor and your own values, of course:
glfwSetWindowMonitor(window, nullptr, 0, 0, windowWidth, windowHeight, windowRefreshRate);
And don't forget to resize the viewport and update the camera.
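A minimal sketch of that last step, assuming you handle it in a framebuffer size callback; the camera update is left as a comment since its API depends on your own classes:

// Keep the viewport (and projection) in sync with the framebuffer size:
glfwSetFramebufferSizeCallback(window, [](GLFWwindow *wnd, int width, int height) {
    glViewport(0, 0, width, height);
    // e.g. recalculate the projection matrix with float(width) / float(height)
});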
Are you resizing the viewport and updating the camera when the user resizes the window?
There are a couple of issues with your code:
1. Assuming that glfwCreateWindow will set the resolution to width * height in fullscreen mode is not correct. The GLFW documentation states (emphasis mine): "For full screen windows, the specified size becomes the resolution of the window's desired video mode. As long as a full screen window is not iconified, the supported video mode most closely matching the desired video mode is set for the specified monitor."
2. Assuming that the window size is specified in "pixels" is not correct either. Quoting the relevant part of the documentation again: "While the size of a window is measured in screen coordinates, OpenGL works with pixels. The size you pass into glViewport, for example, should be in pixels. On some machines screen coordinates and pixels are the same, but on others they will not be. There is a second set of functions to retrieve the size, in pixels, of the framebuffer of a window."
3. You call glViewport without having a current GL context, resulting in undefined behavior, and especially in not setting the viewport at all.
Issues 1 and 2 can be solved by simply calling glfwGetFramebufferSize after the window was created. Issue 3 is actually an interesting one, because the initial viewport for the new context will be the full new window, so your mistakes 1 and 2 have no direct effect. They still might have some effect later if your code relies on m_width and m_height containing useful values, though.
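Putting these fixes together, the relevant part of FullScreen might look like this; a sketch reusing the member names from the question:

m_window = glfwCreateWindow(width, height, m_caption.c_str(),
                            fullScreen ? m_monitor : NULL, oldHandle);
if (m_window == NULL)
{
    glfwTerminate();
    throw std::runtime_error("Failed to recreate window.");
}
glfwDestroyWindow(oldHandle);

glfwMakeContextCurrent(m_window);  // make the new context current *before* any GL call
glfwSwapInterval(1);

int fbWidth = 0, fbHeight = 0;
glfwGetFramebufferSize(m_window, &fbWidth, &fbHeight); // actual size in pixels
glViewport(0, 0, fbWidth, fbHeight);

m_camera->Invalidate();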
I am trying to render an OSG scene into an image in my Qt program, following the SnapImageDrawCallback example (https://www.mail-archive.com/osg-users#lists.openscenegraph.org/msg45360.html).
class SnapImageDrawCallback : public osg::CameraNode::DrawCallback {
public:
    SnapImageDrawCallback()
    {
        _snapImageOnNextFrame = false;
    }

    void setFileName(const std::string& filename) { _filename = filename; }
    const std::string& getFileName() const { return _filename; }

    void setSnapImageOnNextFrame(bool flag) { _snapImageOnNextFrame = flag; }
    bool getSnapImageOnNextFrame() const { return _snapImageOnNextFrame; }

    virtual void operator () (const osg::CameraNode& camera) const
    {
        if (!_snapImageOnNextFrame) return;

        int x, y, width, height;
        x = camera.getViewport()->x();
        y = camera.getViewport()->y();
        width = camera.getViewport()->width();
        height = camera.getViewport()->height();

        osg::ref_ptr<osg::Image> image = new osg::Image;
        image->readPixels(x, y, width, height, GL_RGB, GL_UNSIGNED_BYTE);

        if (osgDB::writeImageFile(*image, _filename))
        {
            std::cout << "Saved screen image to `" << _filename << "`" << std::endl;
        }

        _snapImageOnNextFrame = false;
    }

protected:
    std::string _filename;
    mutable bool _snapImageOnNextFrame;
};
I set this as the osg::Viewer camera's FinalDrawCallback, but I end up with a blank image and get the warning "Warning: detected OpenGL error 'invalid operation' at start of State::apply()" when image->readPixels is invoked. My osgViewer::Viewer is embedded in a QQuickFramebufferObject. Can anyone give some suggestions?
I'm not sure I can give you the right pointer; you should provide more details about your setup and what you're after.
As a general note, if you're trying to render with OSG into a QtQuick widget, the best approach is to have OSG render to an FBO in a separate shared GL context, and copy the FBO contents back to the QtQuick widget.
I had tested this approach some times ago, see code here:
https://github.com/rickyviking/qmlosg
Another similar project here: https://github.com/podsvirov/osgqtquick
You can use a PBO (pixel buffer object) to read the pixels back:
// Allocate a PBO big enough for the RGBA image, then start a read into it
// (the last glReadPixels argument is an offset into the bound PBO):
ext->glGenBuffers(1, &pbo);
ext->glBindBuffer(GL_PIXEL_PACK_BUFFER_ARB, pbo);
ext->glBufferData(GL_PIXEL_PACK_BUFFER_ARB, _width*_height*4, 0, GL_STREAM_READ);
glReadPixels(0, 0, _width, _height, _pixelFormat, _type, 0);

// Map the PBO and copy its contents into the osg::Image:
GLubyte* src = (GLubyte*)ext->glMapBuffer(GL_PIXEL_PACK_BUFFER_ARB,
                                          GL_READ_ONLY_ARB);
if (src)
{
    memcpy(image->data(), src, _width*_height*4);
    ext->glUnmapBuffer(GL_PIXEL_PACK_BUFFER_ARB);
}
ext->glBindBuffer(GL_PIXEL_PACK_BUFFER_ARB, 0);
EDIT: I rearranged the initialization list, as suggested by much_a_chos, so that the Window object initializes before the Game object, ensuring that glew is initialized first. However, this did not work:
//Rearranged initialization list
class TempCore
{
public:
    TempCore(Game* g) :
        win(new Window(800, 800, "EngineTry", false)), gamew(g) {}
    ~TempCore() { if (gamew) delete gamew; }
    ...
};
And here is the code I changed in the Mesh constructor when the above didn't work:
Mesh::Mesh( Vertex* vertices, unsigned int numVerts )
{
    m_drawCount = numVerts;

    glewExperimental = GL_TRUE;
    if (glewInit() != GLEW_OK) {
        exit(-150); //application stops and exits here with the code -150
    }

    glGenVertexArrays(1, &m_vertexArrayObject);
    glBindVertexArray(m_vertexArrayObject);
    ...
}
What happens when I compile and run is surprising: the program exits at the if (glewInit() != GLEW_OK) check I copied from the Window constructor. For some reason glew initializes properly in the Window constructor (which is called before the Game constructor), but it fails to initialize when called the second time in the Mesh constructor. I assume it's bad practice to call glewInit() more than once in a program, but I don't think it should fail if I actually did so. Does anybody know what might be happening? Am I making a mistake in calling glewInit() more than once?
END OF EDIT
I've been following a 3D Game Engine Development tutorial and I've encountered a weird bug in my code, which I will demonstrate below. I'm attempting to make my own game-engine purely for educational reasons. I'm using Code-blocks 13.12 as my IDE and mingw-w64 v4.0 as my compiler. I'm also using SDL2, glew, Assimp and boost as my third-party libraries.
I apologize in advance for the numerous code extracts, but I put in what I thought what was necessary to understand the context of the error.
I have a Core class for my game-engine that holds the main loop and updates and renders accordingly, calling the Game class update() and render() methods in the process as well. The Game class is intended as the holder for all the assets in the game, and will be the base class for any games made using the engine, thus, it contains mesh, texture and camera references. The Game class update(), render() and input() methods are all virtual as the Game class is meant to be derived.
My problem is: when I initialize the Game member variable in the Core class, I get a SIGSEGV (i.e. segmentation fault) in the Mesh object's constructor at the glGenVertexArrays call.
However, when I move my Game object out of the Core class and straight into the main method (changing it from a class member to a simple scoped variable in the main method), along with the necessary parts from the Core class, it runs perfectly and renders my rudimentary triangle example. This is a bug I've never come across, and I would really appreciate any help I can get.
Below is an extract of my morphed code that ran perfectly and rendered the triangle:
int WINAPI WinMain (HINSTANCE hThisInstance, HINSTANCE hPrevInstance, LPSTR lpszArgument, int nCmdShow)
{
    Window win(800, 800, "EngineTry", false); //Creates an SDL implemented window with a GL_context
    Game* gamew = new Game;
    const double frameTime = 1.0 / 500; //500 = maximum fps
    double lastTime = FTime::getTime(); //gets current time in milliseconds
    double unprocessedTime = 0.0;
    int frames = 0;
    double frameCounter = 0;

    while (win.isRunning()) {
        bool _render = false;
        double startTime = FTime::getTime();
        double passedTime = startTime - lastTime;
        lastTime = startTime;
        unprocessedTime += passedTime / (double)FTime::SECOND;
        frameCounter += passedTime;

        while (unprocessedTime > frameTime) {
            if (!win.isRunning())
                exit(0);

            _render = true;
            unprocessedTime -= frameTime;
            FTime::delta = frameTime;

            gamew->input();
            Input::update();
            gamew->update();

            if (frameCounter >= FTime::SECOND)
            {
                std::cout << "FPS: " << frames << std::endl;
                frames = 0;
                frameCounter = 0;
            }
        }
        if (_render) {
            RenderUtil::clearScreen(); //simple wrapper to the glClear function
            gamew->render();
            win.Update();
            frames++;
        } else {
            Sleep(1);
        }
    }
    delete gamew;
    return 0;
}
Here is an extract of my modified Core class that doesn't work (throws the sigsegv in the Mesh constructor)
class TempCore
{
public:
    TempCore(Game* g) :
        gamew(g), win(800, 800, "EngineTry", false) {}
    ~TempCore() { if (gamew) delete gamew; }
    void start();
private:
    Window win;
    Game* gamew;
};

int WINAPI WinMain (HINSTANCE hThisInstance, HINSTANCE hPrevInstance, LPSTR lpszArgument, int nCmdShow)
{
    TempCore m_core(new Game());
    m_core.start();
    return 0;
}
void TempCore::start()
{
    const double frameTime = 1.0 / 500;
    double lastTime = FTime::getTime();
    double unprocessedTime = 0.0;
    int frames = 0;
    double frameCounter = 0;

    while (win.isRunning()) {
        bool _render = false;
        double startTime = FTime::getTime();
        double passedTime = startTime - lastTime;
        lastTime = startTime;
        unprocessedTime += passedTime / (double)FTime::SECOND;
        frameCounter += passedTime;

        while (unprocessedTime > frameTime) {
            if (!win.isRunning())
                exit(0);

            _render = true;
            unprocessedTime -= frameTime;
            FTime::delta = frameTime;

            gamew->input();
            Input::update();
            gamew->update();

            if (frameCounter >= FTime::SECOND) {
                //double totalTime = ((1000.0 * frameCounter)/((double)frames));
                //double totalMeasuredTime = 0.0;
                std::cout << "Frames: " << frames << std::endl;
                //m_frames_per_second = frames;
                frames = 0;
                frameCounter = 0;
            }
        }
        if (_render) {
            RenderUtil::clearScreen();
            gamew->render();
            win.Update();
            frames++;
        } else {
            Sleep(1);
        }
    }
}
Mesh constructor where the sigsegv occurs in the above TempCore implementation:
Mesh::Mesh( Vertex* vertices, unsigned int numVerts )
{
    m_drawCount = numVerts;

    glGenVertexArrays(1, &m_vertexArrayObject); //sigsegv occurs here
    glBindVertexArray(m_vertexArrayObject);

    std::vector<glm::vec3> positions;
    std::vector<glm::vec2> texCoords;
    positions.reserve(numVerts);
    texCoords.reserve(numVerts);

    for (unsigned i = 0; i < numVerts; i++) {
        positions.push_back(vertices[i].pos);
        texCoords.push_back(vertices[i].texCoord);
    }

    glGenBuffers(NUM_BUFFERS, m_vertexArrayBuffers);

    glBindBuffer(GL_ARRAY_BUFFER, m_vertexArrayBuffers[POSITION_VB]);
    glBufferData(GL_ARRAY_BUFFER, numVerts*sizeof(positions[0]), &positions[0], GL_STATIC_DRAW);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);

    glBindBuffer(GL_ARRAY_BUFFER, m_vertexArrayBuffers[TEXCOORD_VB]);
    glBufferData(GL_ARRAY_BUFFER, numVerts*sizeof(texCoords[0]), &texCoords[0], GL_STATIC_DRAW);
    glEnableVertexAttribArray(1);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, 0);

    glBindVertexArray(0);
}
The Game constructor that initializes the Mesh object:
Vertex vertices[] = { Vertex(-0.5f, -0.5f, 0, 0, 0),
                      Vertex(0, 0.5f, 0, 0.5f, 1.0f),
                      Vertex(0.5f, -0.5f, 0, 1.0f, 0) };
//Vertex is basically a struct with a glm::vec3 for position and a glm::vec2 for texture coordinate

Game::Game() :
    m_mesh(vertices, sizeof(vertices)/sizeof(vertices[0])),
    m_shader("res\\shaders\\basic_shader"),
    m_texture("res\\textures\\mist_tree.jpg")
{
}
The Window class constructor that initializes glew:
Window::Window(int width, int height, const std::string& title, bool full_screen) :
    m_fullscreen(full_screen)
{
    SDL_Init(SDL_INIT_EVERYTHING);

    SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
    SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
    SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
    SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE, 8);
    SDL_GL_SetAttribute(SDL_GL_BUFFER_SIZE, 32);
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);

    //SDL_Window* in private of class declaration
    m_window = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, width, height, SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE);

    //SDL_GLContext in private of class declaration
    m_glContext = SDL_GL_CreateContext(m_window);

    std::cout << "GL Version: " << glGetString(GL_VERSION) << std::endl;

    glewExperimental = GL_TRUE;
    if (glewInit() != GLEW_OK || !glVersionAbove(3.0)) {
        std::cerr << "Glew failed to initialize...\n";
        exit(-150);
    }
}
A long shot here, since the given amount of information is pretty big. I searched for similar questions like this one and this one, but every one of them has been answered with tricks you're doing in your Window class constructor, which have to be called before your Game constructor. And as I can see in your TempCore constructor, you build your Game object (and make a call to glGenVertexArrays) before your Window object is constructed:
...
TempCore(Game* g) :
    gamew(g), win(800, 800, "EngineTry", false) {}
...
So it runs before creating your OpenGL context with SDL_GL_CreateContext(m_window), and before glewExperimental = GL_TRUE; glewInit();. And then you say that putting it in main in this order solves the problem...
...
Window win(800, 800, "EngineTry", false); //Creates an SDL implemented window with a GL_context
Game* gamew = new Game;
...
Maybe reordering your initialization list in your constructor like this could solve your problem?
class TempCore
{
public:
    TempCore(Game* g) :
        win(800, 800, "EngineTry", false), gamew(g) {}
    ~TempCore() { if (gamew) delete gamew; }
    ...
};
UPDATE
I was wrong; as stated in the comments, the initialization list order doesn't matter. It's the declaration order of the members that matters, and that is correct here...
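(To see the rule in isolation, here is a tiny standalone example with made-up names; members are constructed in declaration order, regardless of the order in the initializer list:)

#include <iostream>

struct A { A() { std::cout << "A constructed\n"; } };
struct B { B() { std::cout << "B constructed\n"; } };

struct Holder {
    A a; // declared first, so constructed first...
    B b;
    // ...even though the initializer list names b first
    // (compilers typically warn about this reordering):
    Holder() : b(), a() {}
};

int main() { Holder h; } // prints "A constructed", then "B constructed"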
Thanks to both @much_a_chos and @vu1p3n0x for your help. Turns out much_a_chos had the right idea: the Game object was initializing before the Window object, thus missing the glewInit() call altogether and resulting in the sigsegv error. The problem, however, was not in the initializer list but in the main.cpp file. I was creating a Game object and then passing it via pointer to the Core class, so regardless of how I arranged the Core class members, the Game object would always initialize before the Window class, and would therefore always make its glGenVertexArrays call before glewInit() was called. This is a terrible logic error on my side and I apologize for wasting your time.
Below are extracts from the fixed main.cpp file and the fixed TempCore class (please keep in mind that these are temporary fixes to illustrate how I would go about fixing my mistake):
class TempCore
{
public:
    TempCore(Window* w, Game* g) : //take in a Window class pointer to ensure it's created before the Game class constructor
        win(w), gamew(g) {}
    ~TempCore() { if (gamew) delete gamew; }
    void start();
private:
    Window* win;
    Game* gamew;
};

int WINAPI WinMain (HINSTANCE hThisInstance, HINSTANCE hPrevInstance, LPSTR lpszArgument, int nCmdShow)
{
    Window* win = new Window(800, 800, "EngineTry", false); //this way the Window constructor with the glewInit() call runs before the Game constructor
    TempCore m_core(win, new Game());
    m_core.start();
    return 0;
}
Addressing your edit: you should not call glewInit() more than once. I'm not familiar with glew in this regard, but in general anything should only be "initialized" once; glew probably assumes that it is uninitialized and errors out because some initialization is already there.
I'd recommend calling glewInit() at the very beginning of the program and not in an object constructor. (Unless you have that object "own" glew)
Edit: It seems my assumption about glewInit() was slightly wrong. glewInit() behaves differently depending on the build, but regardless it should only be called again if you switch contexts. However, because you aren't changing contexts (from what I see), you should not call it more than once.
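In other words, the safe ordering looks roughly like this; a sketch against the question's classes, assuming glewInit() is moved out of the Window constructor and called exactly once, after the context exists and before anything makes a GL call:

Window win(800, 800, "EngineTry", false); // creates the SDL window and the GL context

glewExperimental = GL_TRUE;               // initialize GLEW once, for this context
if (glewInit() != GLEW_OK) {
    std::cerr << "Glew failed to initialize...\n";
    return -1;
}

Game* gamew = new Game;                   // safe: glGenVertexArrays now has a context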