OpenGL fails creating a basic window - C++

I'm trying to build and run a basic OpenGL program on Ubuntu, but I get a runtime error: the window I create crashes!
Here is my code:
#include <iostream>
#include <GL/glew.h>
#include <GLFW/glfw3.h>

const GLint WIDTH = 800, HEIGHT = 600;

int main()
{
    if (!glfwInit())
    {
        std::cerr << "GLFW initialization failed" << std::endl;
        glfwTerminate();
        return 1;
    }

    // Set up GLFW window properties
    // OpenGL version 3.3, core profile
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);

    GLFWwindow *main_window = glfwCreateWindow(WIDTH, HEIGHT, "TestWindow", NULL, NULL);
    if (!main_window)
    {
        std::cerr << "GLFW window creation failed" << std::endl;
        glfwTerminate();
        return 1;
    }

    int bufferWidth, bufferHeight;
    glfwGetFramebufferSize(main_window, &bufferWidth, &bufferHeight);
    glfwMakeContextCurrent(main_window);

    glewExperimental = GL_TRUE;
    if (glewInit() != GLEW_OK)
    {
        glfwDestroyWindow(main_window);
        glfwTerminate();
        return 1;
    }

    glViewport(0, 0, bufferWidth, bufferHeight);

    // Loop until window closed. Note: the original post had a stray
    // semicolon after the condition -- `while(...);` -- which spins
    // forever without ever polling events, so the window stops responding.
    while (!glfwWindowShouldClose(main_window))
    {
        glfwPollEvents();
        glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT);
        glfwSwapBuffers(main_window);
    }
    return 0;
}
This is the output I get:
I don't know if it's related to my graphics card:
description: VGA compatible controller
product: GK107GLM [Quadro K1100M]
vendor: NVIDIA Corporation
physical id: 0
bus info: pci@0000:01:00.0
version: a1
width: 64 bits
clock: 33MHz
capabilities: vga_controller bus_master cap_list rom
configuration: driver=nouveau latency=0
resources: irq:36 memory:cf000000-cfffffff memory:70000000-7fffffff memory:80000000-81ffffff ioport:5000(size=128) memory:c0000-dffff
Any explanation?

Related

EGL vs OpenGL context

I use the BGFX framework for rendering in an application. I'm on Linux with an NVIDIA graphics card, and the BGFX build I use has OpenGL as its backend (I don't want to switch to the Vulkan backend).
Everything worked fine, but one new feature requires me to use EGL. The first thing I do in main is set EGL to use OpenGL as the rendering API:
if (not eglBindAPI(EGL_OPENGL_API) || (eglGetError() != EGL_SUCCESS))
//error handling
It works well.
Then I create an X11 window, call eglGetDisplay, call eglInitialize, and call eglChooseConfig; all of them return without any error.
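Condensed, that sequence looks like this (a sketch; variable names match the full listing in the update below):

EGLDisplay eglDisplay = eglGetDisplay((EGLNativeDisplayType)x11Display); // x11Display: the Display* of the X11 window
EGLint major, minor;
eglInitialize(eglDisplay, &major, &minor);
const EGLint cfgAttr[] = { EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
                           EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT, EGL_NONE };
EGLConfig eglConfig;
EGLint numConfigs = 0;
eglChooseConfig(eglDisplay, cfgAttr, &eglConfig, 1, &numConfigs);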
Then I call BGFX init, and it runs without any error.
At this point I have an initialized BGFX (using the OpenGL backend) and a current OpenGL context (created by BGFX):
std::cout << "GL Cont: " << glXGetCurrentContext() << std::endl; // Valid pointer
std::cout << "EGL Cont: " << eglGetCurrentContext() << std::endl; // 0x0
std::cout << "BGFX Renderer: " << bgfx::getRendererType() << std::endl; // 8 - OpenGL
Then I would like to execute the new EGL stuff related to the new feature on a different thread (I call eglBindAPI on the new thread as well):
EGLContext globalEglContext{};
{
    static constexpr EGLint contextAttr[]{
        EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE
    };
    globalEglContext = eglCreateContext(eglDisplay, eglConfig, EGL_NO_CONTEXT, contextAttr);
    if (EGL_NO_CONTEXT == globalEglContext)
    {
        // error handling
    }
}

if (!eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, globalEglContext))
{
    printf("Error on eglMakeCurrent (error: 0x%x)", eglGetError());
}
Context creation succeeds, but the eglMakeCurrent call returns false, even though the error code is 0x3000 (EGL_SUCCESS):
Error on eglMakeCurrent (error: 0x3000)
I cannot ignore the failure, as the next EGL operation fails too, so it is a real error.
If I execute the very same context creation code on the main thread I get:
Error on eglMakeCurrent (error: 0x3002)
Checking 0x3002 (EGL_BAD_ACCESS) in the manual doesn't explain my case; the spec lists it for a context or surface that is already current on some other thread, which shouldn't apply here.
If I create and make my EGL context current on the main thread before initializing BGFX, and I add the following X11 error handler:
XSetErrorHandler(+[](Display *display, XErrorEvent *error)
{
    char buf[255];
    XGetErrorText(display, error->error_code, buf, 255);
    printf("X11 error: %s", buf);
    return 1;
});
then context creation and making it current work well, but during BGFX init I get the following error message:
X11 error: GLXBadDrawableX11 error: GLXBadDrawableX11
I have two questions:
Is it possible that EGL and OpenGL contexts cannot be used at the same time? (On one thread I would have a current OpenGL context while on another thread an EGL context.)
If OpenGL and EGL contexts cannot be used at the same time, not even on different threads, then how can I use EGL features while continuing to use OpenGL as the rendering backend?
UPDATE:
I created a test app that creates and makes current a GLX context, then creates and tries to make current an EGL context, and the latter fails.
Does this mean that EGL and OpenGL cannot be used at the same time?
The full source code (main.cpp):
#include <iostream>
#include <assert.h>
#include <thread>
#include <chrono>
#include <future>

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GL/gl.h>
#include <GL/glx.h>

#include <SDL2/SDL.h>
#include <SDL2/SDL_syswm.h>

int main()
{
    if (not eglBindAPI(EGL_OPENGL_API) || (eglGetError() != EGL_SUCCESS))
    {
        printf("Could not bind EGL OpenGL API (error: 0x%0x)\n", eglGetError());
        return -1;
    }

    XSetErrorHandler(+[](Display *display, XErrorEvent *error)
    {
        char buf[255];
        XGetErrorText(display, error->error_code, buf, 255);
        printf("X11 error: %s\n", buf);
        return 1;
    });

    //
    // WINDOW
    //
    uint32_t flags = SDL_WINDOW_RESIZABLE;
    const auto sdlWindow = SDL_CreateWindow("win", 0, 0, 640, 480, flags);
    SDL_ShowWindow(sdlWindow);

    SDL_SysWMinfo wmi;
    SDL_VERSION(&wmi.version);
    if (!SDL_GetWindowWMInfo(sdlWindow, &wmi))
    {
        return -1;
    }
    auto display = wmi.info.x11.display;

    //
    // EGL INIT
    //
    void *eglConfig{};
    void *eglDisplay{};
    void *eglSurface{};

    // EGL init
    {
        // Get EGL display
        eglDisplay = eglGetDisplay((EGLNativeDisplayType)display);
        if (eglDisplay == EGL_NO_DISPLAY)
        {
            printf("Could not create EGLDisplay (error: 0x%0x)\n", eglGetError());
            return -1;
        }

        // Init EGL display
        {
            EGLint major;
            EGLint minor;
            if (!eglInitialize(eglDisplay, &major, &minor))
            {
                printf("Failed initializing EGL (error: 0x%0x)\n", eglGetError());
                return -1;
            }
            else
            {
                printf("EGL initialized (Version: %d.%d)\n", major, minor);
            }
        }

        // Choose EGL config
        {
            static constexpr EGLint cfgAttr[]{
                EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
                EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT,
                EGL_RED_SIZE, 1,
                EGL_GREEN_SIZE, 1,
                EGL_BLUE_SIZE, 1,
                EGL_ALPHA_SIZE, 1,
                EGL_DEPTH_SIZE, 1,
                EGL_NONE
            };
            EGLint numConfigs{0};
            if (!eglChooseConfig(eglDisplay, cfgAttr, &eglConfig, 1, &numConfigs))
            {
                printf("Failed on eglChooseConfig (error: 0x%0x)\n", eglGetError());
                return -1;
            }
        }

        // Create EGL surface
        eglSurface = eglCreateWindowSurface(eglDisplay, eglConfig, wmi.info.x11.window, nullptr);
        if (eglSurface == EGL_NO_SURFACE)
        {
            printf("Could not create EGLSurface (error: 0x%0x)\n", eglGetError());
            return -1;
        }
    }

    //
    // OpenGL context
    //
    const auto screen = DefaultScreenOfDisplay(display);
    const auto screenId = DefaultScreen(display);

    static GLint glxAttribs[] = {
        GLX_RGBA,
        GLX_DOUBLEBUFFER,
        GLX_DEPTH_SIZE, 24,
        GLX_STENCIL_SIZE, 8,
        GLX_RED_SIZE, 8,
        GLX_GREEN_SIZE, 8,
        GLX_BLUE_SIZE, 8,
        GLX_SAMPLE_BUFFERS, 0,
        GLX_SAMPLES, 0,
        None
    };

    XVisualInfo* visual = glXChooseVisual(display, screenId, glxAttribs);
    if (visual == 0)
    {
        printf("Could not create correct visual window.\n");
        return -1;
    }

    GLXContext context = glXCreateContext(display, visual, NULL, GL_TRUE);
    if (!glXMakeContextCurrent(display, None, None, context))
    {
        printf("Could not make context current.\n");
        return -1;
    }

    std::cout << "GL Cont: " << glXGetCurrentContext() << std::endl;
    std::cout << "EGL Cont: " << eglGetCurrentContext() << std::endl;

    /*
    // Uncomment this and EGL context creation works
    if (!glXMakeContextCurrent(display, None, None, NULL))
    {
        printf("Could not make context current.\n");
        return -1;
    }
    std::cout << "GL Cont: " << glXGetCurrentContext() << std::endl;
    std::cout << "EGL Cont: " << eglGetCurrentContext() << std::endl;
    */

    //
    // EGL CONTEXT
    //
    auto launchPolicy = std::launch::deferred; // change to std::launch::async to create the EGL context on a thread
    auto res = std::async(launchPolicy, [&]() {
        void *globalEglContext;
        {
            static constexpr EGLint contextAttr[]{
                EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE
            };
            globalEglContext = eglCreateContext(eglDisplay, eglConfig, EGL_NO_CONTEXT, contextAttr);
            if (EGL_NO_CONTEXT == globalEglContext)
            {
                printf("Error creating EGL context (error: 0x%x)\n", eglGetError());
                exit(-2);
            }
        }

        // Fails with 0x3000 (EGL_SUCCESS) on a different thread.
        // Fails with 0x3002 (EGL_BAD_ACCESS) on the main thread.
        if (!eglMakeCurrent(eglDisplay, eglSurface, eglSurface, globalEglContext))
        {
            printf("Error on eglMakeCurrent (error: 0x%x)\n", eglGetError());
            exit(-3);
        }
        return 0;
    });
    res.wait();

    std::cout << "GL Cont: " << glXGetCurrentContext() << std::endl;
    std::cout << "EGL Cont: " << eglGetCurrentContext() << std::endl;
}
CMakeLists.txt:
cmake_minimum_required(VERSION 3.5)

project(EGLTest LANGUAGES CXX)

set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

find_package(OpenGL REQUIRED COMPONENTS EGL)
find_package(PkgConfig REQUIRED)
pkg_check_modules(X11 REQUIRED x11)
pkg_check_modules(SDL2 REQUIRED sdl2)

add_executable(${PROJECT_NAME} main.cpp)

target_include_directories(
    ${PROJECT_NAME}
    SYSTEM
    PUBLIC ${OPENGL_EGL_INCLUDE_DIRS}
    PUBLIC ${SDL2_INCLUDE_DIRS}
)

target_link_libraries(
    ${PROJECT_NAME}
    OpenGL::EGL
    ${SDL2_LIBRARIES}
)
UPDATE 2:
My config:
Kubuntu 22.04 LTS 5.15.0-52-generic
Operating System: Ubuntu 22.04
KDE Plasma Version: 5.24.6
KDE Frameworks Version: 5.98.0
Qt Version: 5.15.3
Kernel Version: 5.15.0-52-generic (64-bit)
Graphics Platform: X11
Processors: 16 × 11th Gen Intel® Core™ i7-11800H @ 2.30GHz
NVIDIA-SMI 470.141.03 Driver Version: 470.141.03 CUDA Version: 11.4
OpenGL vendor string: NVIDIA Corporation
OpenGL renderer string: NVIDIA GeForce RTX 3050 Ti Laptop GPU/PCIe/SSE2
OpenGL core profile version string: 4.6.0 NVIDIA 470.141.03
OpenGL core profile shading language version string: 4.60 NVIDIA
OpenGL ES profile version string: OpenGL ES 3.2 NVIDIA 470.141.03
OpenGL ES profile shading language version string: OpenGL ES GLSL ES 3.20
UPDATE 3:
adam@pc:~/git/bgfx_test/build$ ldd BgfxTest | grep GL
libEGL.so.1 => /lib/x86_64-linux-gnu/libEGL.so.1 (0x00007f32b95dd000)
libGLX.so.0 => /lib/x86_64-linux-gnu/libGLX.so.0 (0x00007f32b95a9000)
libGLdispatch.so.0 => /lib/x86_64-linux-gnu/libGLdispatch.so.0 (0x00007f32b8d9d000)

Error of failed request: GLXBadDrawable (C++ bgfx)

I am trying to make a purple window with GLFW and bgfx as a starting point for my game engine, but launching the binary throws an error.
I am using CMake for it, and the error is:
anuj@fedora ~/D/C/C/c/o/build> ./bgfx_test
X Error of failed request: GLXBadDrawable
Major opcode of failed request: 152 (GLX)
Minor opcode of failed request: 11 (X_GLXSwapBuffers)
Serial number of failed request: 33
Current serial number in output stream: 33
I want to use OpenGL for it. My CMakeLists.txt is:
cmake_minimum_required(VERSION 3.22.1)
project(bgfx_test)
set(CMAKE_CXX_STANDARD 14)
add_executable(bgfx_test main.cpp )
target_link_libraries(bgfx_test bgfx glfw3 GL dl X11)
and my main.cpp file is:
#include <GL/gl.h>
#include "submods/bgfx.cmake/bgfx/examples/common/common.h"
#include "submods/bgfx.cmake/bgfx/examples/common/bgfx_utils.h"
#include "submods/bgfx.cmake/bgfx/include/bgfx/bgfx.h"
#include <GLFW/glfw3.h>
#include <iostream>
#define GLFW_EXPOSE_NATIVE_X11
#include <GLFW/glfw3native.h>

void exit(GLFWwindow *window)
{
    if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS)
    {
        glfwSetWindowShouldClose(window, true);
    }
}

int main()
{
    glfwInit();
    // Note: without glfwWindowHint(GLFW_CLIENT_API, GLFW_NO_API), GLFW
    // creates its own OpenGL context for this window, which can clash
    // with the one bgfx creates.
    GLFWwindow *window = glfwCreateWindow(900, 900, "learn_bgfx", NULL, NULL);
    if (window == NULL)
    {
        printf("no glfw");
        glfwTerminate();
        return -1;
    }

    // Note: `pd` is filled in but never handed to bgfx; it normally has
    // to be passed on (e.g. via bgfxInit.platformData) before bgfx::init.
    bgfx::PlatformData pd;
    pd.ndt = glfwGetX11Display();
    pd.nwh = (void *)glfwGetX11Window(window);

    bgfx::Init bgfxInit;
    bgfxInit.type = bgfx::RendererType::Count; // Automatically choose a renderer.
    bgfxInit.resolution.width = 900;
    bgfxInit.resolution.height = 900;
    bgfxInit.resolution.reset = BGFX_RESET_VSYNC;
    bgfx::init(bgfxInit);

    bgfx::setViewClear(0, BGFX_CLEAR_COLOR | BGFX_CLEAR_DEPTH, 0x443355FF, 1.0f, 0);
    bgfx::setViewRect(0, 0, 0, 900, 900);

    unsigned int counter = 0;
    while (true)
    {
        bgfx::frame();
        counter++;
    }
    glfwTerminate();
    return 0;
}
and my project files are:
main.cpp
test.cpp
CMakeLists.txt
submods
bgfx.cmake
out
build
and here is the makefile for main.cpp:
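For reference, the bgfx GLFW examples typically tell GLFW not to create its own OpenGL context and pass the native window handles into bgfx::init. A sketch of that wiring (assuming a recent bgfx API where bgfx::Init has a platformData member), since the code above fills in pd but never hands it to bgfx:

glfwInit();
// Stop GLFW from creating an OpenGL context of its own; bgfx creates one itself.
glfwWindowHint(GLFW_CLIENT_API, GLFW_NO_API);
GLFWwindow *window = glfwCreateWindow(900, 900, "learn_bgfx", NULL, NULL);

bgfx::Init bgfxInit;
bgfxInit.platformData.ndt = glfwGetX11Display();
bgfxInit.platformData.nwh = (void *)glfwGetX11Window(window);
bgfxInit.type = bgfx::RendererType::Count;
bgfxInit.resolution.width = 900;
bgfxInit.resolution.height = 900;
bgfxInit.resolution.reset = BGFX_RESET_VSYNC;
bgfx::init(bgfxInit);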

GLEW error (1): Missing GL version (GLFW)

I'm trying to use OpenGL with GLEW and GLFW. However, it appears that I cannot call glewInit() successfully, and I get the following error message: GLEW error (1): Missing GL version. Similar questions on Stack Overflow haven't solved my problem.
When building my project, I am including the glew.c file and the directory where glew.h is.
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>

// Assumed here so the snippet compiles; not shown in the original post.
const unsigned int SCR_WIDTH = 800;
const unsigned int SCR_HEIGHT = 600;

int main()
{
    glfwInit();
    glfwWindowHint(GLFW_SAMPLES, 0);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    // Note: `window` is never checked for NULL here; if creation fails,
    // glfwMakeContextCurrent(NULL) leaves no context current on the thread.
    GLFWwindow* window = glfwCreateWindow(SCR_WIDTH, SCR_HEIGHT, "Testing", nullptr, nullptr);
    glfwMakeContextCurrent(window);

    glewExperimental = true;
    GLenum glewErr = glewInit();
    if (glewErr != GLEW_OK)
    {
        std::cerr << "GLEW error (" << glewErr << "): " << glewGetErrorString(glewErr) << std::endl;
        glfwTerminate();
        return -1;
    }
    // Next, I paint some stuff using OpenGL ...
}
Everything looks ok in the code. Furthermore, for some reason, if I use GLAD instead of GLEW, my code works just fine.
System: Ubuntu 18.04.5 LTS.
OGL version: 3.3.0 NVIDIA 340.108
OGL vendor: NVIDIA Corporation
Renderer: GeForce 820M/PCIe/SSE2
GLSL version: 3.30 NVIDIA via Cg compiler
UPDATE:
Diving into the glew.c code, I found where the error pops up, though I still can't understand why. Here is the piece of code:
static GLenum GLEWAPIENTRY glewContextInit ()
{
    PFNGLGETSTRINGPROC getString;
    const GLubyte* s;
    GLuint dot;

#ifdef _WIN32
    getString = glGetString; // Not executed, I'm on Ubuntu
#else
    getString = (PFNGLGETSTRINGPROC) glewGetProcAddress((const GLubyte*)"glGetString");
    if (!getString)
        return GLEW_ERROR_NO_GL_VERSION;
#endif

    /* query opengl version */
    s = getString(GL_VERSION); // <<< ERROR: s gets a null pointer
    dot = _glewStrCLen(s, '.');
    if (dot == 0)
        return GLEW_ERROR_NO_GL_VERSION; // <<< Function escapes here

    // ... more statements down here
}
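glGetString(GL_VERSION) returning a null pointer is the classic symptom of having no OpenGL context current on the calling thread, which is consistent with the failure above. An illustrative check (not from the original post) that can be run before glewInit():

GLFWwindow* window = glfwCreateWindow(SCR_WIDTH, SCR_HEIGHT, "Testing", nullptr, nullptr);
if (!window) // e.g. the driver refused the requested 3.3 core context
{
    std::cerr << "glfwCreateWindow failed" << std::endl;
    glfwTerminate();
    return -1;
}
glfwMakeContextCurrent(window);
if (!glfwGetCurrentContext()) // no current context => glewInit() cannot query the GL version
{
    std::cerr << "No current OpenGL context on this thread" << std::endl;
    return -1;
}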

SDL2 on Raspberry Pi without X?

I'm hoping to develop some code that uses SDL2 to display graphics on the 7" RPi touchscreen, but I'd rather not install a full desktop OS. I've got Raspbian Buster Lite installed. Some simple test code gets an error when I try to run it:
user@rpi4:~/01_hello_SDL $ ./hw
Window could not be created! SDL_Error: Could not initialize EGL
user@rpi4:~/01_hello_SDL $ sudo ./hw
error: XDG_RUNTIME_DIR not set in the environment.
Window could not be created! SDL_Error: Could not initialize EGL
I'm trying to create the window with
SDL_CreateWindow( "SDL Tutorial", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, SCREEN_WIDTH, SCREEN_HEIGHT, SDL_WINDOW_SHOWN | SDL_WINDOW_OPENGL )
I found a post that referenced instructions on how to build SDL2 without X, but I was hoping someone could educate me a bit more about how SDL finds the display in various environments, and if it’s even possible to do what I want to do.
A few years ago I used SDL 1.2 to do full-screen graphics on a Beaglebone Black running a version of Debian, but I seem to have lost that installation, and don’t remember how it was set up. I vaguely recall some issues around fbdev and it being non-accelerated graphics, but that didn’t matter at the time (and while I’d like to get accelerated graphics now, it’s not critical).
Example code:
/*This source code copyrighted by Lazy Foo' Productions (2004-2019)
and may not be redistributed without written permission.*/

//Using SDL and standard IO
#include <SDL.h>
#include <stdio.h>

//Screen dimension constants
const int SCREEN_WIDTH = 800;
const int SCREEN_HEIGHT = 480;

int main( int argc, char* args[] )
{
    //The window we'll be rendering to
    SDL_Window* window = NULL;

    //The surface contained by the window
    SDL_Surface* screenSurface = NULL;

    //Initialize SDL
    if( SDL_Init( SDL_INIT_VIDEO ) < 0 )
    {
        printf( "SDL could not initialize! SDL_Error: %s\n", SDL_GetError() );
    }
    else
    {
        //Create window
        window = SDL_CreateWindow( "SDL Tutorial", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, SCREEN_WIDTH, SCREEN_HEIGHT, SDL_WINDOW_FULLSCREEN | SDL_WINDOW_SHOWN | SDL_WINDOW_OPENGL );
        if( window == NULL )
        {
            printf( "Window could not be created! SDL_Error: %s\n", SDL_GetError() );
        }
        else
        {
            //Get window surface
            screenSurface = SDL_GetWindowSurface( window );

            //Fill the surface white
            SDL_FillRect( screenSurface, NULL, SDL_MapRGB( screenSurface->format, 0xFF, 0xFF, 0xFF ) );

            //Update the surface
            SDL_UpdateWindowSurface( window );

            //Wait two seconds
            SDL_Delay( 2000 );
        }
    }

    //Destroy window
    SDL_DestroyWindow( window );

    //Quit SDL subsystems
    SDL_Quit();

    return 0;
}
Alrighty, got it working on my Raspberry Pi 3 with 2019-07-10-raspbian-buster-lite.img, both with the default Broadcom blobs & the KMS/DRM backend:
Install SDL2 build dependencies:
# install everything Debian uses to build SDL
sudo apt build-dep libsdl2
# needed for the KMSDRM backend:
sudo apt install libdrm-dev libgbm-dev
Grab the latest stable SDL source tarball or tag (release-2.0.10) from Git and extract it somewhere like ~/sdl-src
Run SDL's configure script:
cd ~/sdl-src
./configure --enable-video-kmsdrm
Here's my configure summary:
SDL2 Configure Summary:
Building Shared Libraries
Building Static Libraries
Enabled modules : atomic audio video render events joystick haptic sensor power filesystem threads timers file loadso cpuinfo assembly
Assembly Math :
Audio drivers : disk dummy oss alsa(dynamic) pulse(dynamic) sndio(dynamic)
Video drivers : dummy rpi x11(dynamic) kmsdrm(dynamic) opengl opengl_es1 opengl_es2 vulkan wayland(dynamic)
X11 libraries : xcursor xdbe xinerama xinput2 xinput2_multitouch xrandr xscrnsaver xshape xvidmode
Input drivers : linuxev linuxkd
Using libsamplerate : YES
Using libudev : YES
Using dbus : YES
Using ime : YES
Using ibus : YES
Using fcitx : YES
Note the rpi and kmsdrm(dynamic) entries in the Video drivers list:
Video drivers : dummy rpi x11(dynamic) kmsdrm(dynamic) opengl opengl_es1 opengl_es2 vulkan wayland(dynamic)
^^^ ^^^^^^^^^^^^^^^
Build & install SDL; took ~4.5 minutes on my Rpi3:
make -j4 && sudo make install
Build test program:
g++ main.cpp `pkg-config --cflags --libs sdl2`
(Optional) Enable the "Full KMS" driver if you want to use the KMSDRM backend instead of the default OpenGL ES blobs:
$ sudo raspi-config
select '7 Advanced Options'
select 'A7 GL Driver'
select 'G3 GL (Full KMS)'
reboot
Run test program:
$ ./a.out
Testing video drivers...
The path /dev/dri/ cannot be opened or is not available
The path /dev/dri/ cannot be opened or is not available
SDL_VIDEODRIVER available: x11 wayland KMSDRM RPI dummy
SDL_VIDEODRIVER usable : RPI
The path /dev/dri/ cannot be opened or is not available
The path /dev/dri/ cannot be opened or is not available
SDL_VIDEODRIVER selected : RPI
SDL_RENDER_DRIVER available: opengl opengles2 opengles software
SDL_RENDER_DRIVER selected : opengles2
You can use environment variables to override the default video/render driver selection:
SDL_VIDEODRIVER=KMSDRM SDL_RENDER_DRIVER=software ./a.out
I had to hold SDL's hand a bit with envvars to get the KMSDRM backend to load:
# no envvars, fails:
$ ./a.out
Testing video drivers...
SDL_VIDEODRIVER available: x11 wayland KMSDRM RPI dummy
SDL_VIDEODRIVER usable : KMSDRM
SDL_VIDEODRIVER selected : KMSDRM
SDL_CreateWindow(): Could not initialize OpenGL / GLES library
# with envvars, succeeds:
$ SDL_VIDEO_EGL_DRIVER=libEGL.so SDL_VIDEO_GL_DRIVER=libGLESv2.so ./a.out
Testing video drivers...
SDL_VIDEODRIVER available: x11 wayland KMSDRM RPI dummy
SDL_VIDEODRIVER usable : KMSDRM
SDL_VIDEODRIVER selected : KMSDRM
SDL_RENDER_DRIVER available: opengl opengles2 opengles software
SDL_RENDER_DRIVER selected : opengl
Here's the test program I've been using:
// g++ main.cpp `pkg-config --cflags --libs sdl2`
#include <SDL.h>
#include <iostream>
#include <vector>

int main( int argc, char** argv )
{
    SDL_Init( 0 );

    std::cout << "Testing video drivers..." << '\n';
    std::vector< bool > drivers( SDL_GetNumVideoDrivers() );
    for( int i = 0; i < drivers.size(); ++i )
    {
        drivers[ i ] = ( 0 == SDL_VideoInit( SDL_GetVideoDriver( i ) ) );
        SDL_VideoQuit();
    }

    std::cout << "SDL_VIDEODRIVER available:";
    for( int i = 0; i < drivers.size(); ++i )
    {
        std::cout << " " << SDL_GetVideoDriver( i );
    }
    std::cout << '\n';

    std::cout << "SDL_VIDEODRIVER usable   :";
    for( int i = 0; i < drivers.size(); ++i )
    {
        if( !drivers[ i ] ) continue;
        std::cout << " " << SDL_GetVideoDriver( i );
    }
    std::cout << '\n';

    if( SDL_Init( SDL_INIT_EVERYTHING ) < 0 )
    {
        std::cerr << "SDL_Init(): " << SDL_GetError() << '\n';
        return EXIT_FAILURE;
    }

    std::cout << "SDL_VIDEODRIVER selected : " << SDL_GetCurrentVideoDriver() << '\n';

    SDL_Window* window = SDL_CreateWindow
    (
        "SDL2",
        SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
        640, 480,
        SDL_WINDOW_SHOWN
    );
    if( nullptr == window )
    {
        std::cerr << "SDL_CreateWindow(): " << SDL_GetError() << '\n';
        return EXIT_FAILURE;
    }

    std::cout << "SDL_RENDER_DRIVER available:";
    for( int i = 0; i < SDL_GetNumRenderDrivers(); ++i )
    {
        SDL_RendererInfo info;
        SDL_GetRenderDriverInfo( i, &info );
        std::cout << " " << info.name;
    }
    std::cout << '\n';

    SDL_Renderer* renderer = SDL_CreateRenderer( window, -1, SDL_RENDERER_ACCELERATED );
    if( nullptr == renderer )
    {
        std::cerr << "SDL_CreateRenderer(): " << SDL_GetError() << '\n';
        return EXIT_FAILURE;
    }

    SDL_RendererInfo info;
    SDL_GetRendererInfo( renderer, &info );
    std::cout << "SDL_RENDER_DRIVER selected : " << info.name << '\n';

    bool running = true;
    unsigned char i = 0;
    while( running )
    {
        SDL_Event ev;
        while( SDL_PollEvent( &ev ) )
        {
            if( ( ev.type == SDL_QUIT ) ||
                ( ev.type == SDL_KEYDOWN && ev.key.keysym.sym == SDLK_ESCAPE ) )
            {
                running = false;
            }
        }

        SDL_SetRenderDrawColor( renderer, i, i, i, SDL_ALPHA_OPAQUE );
        SDL_RenderClear( renderer );
        SDL_RenderPresent( renderer );
        i++;
    }

    SDL_DestroyRenderer( renderer );
    SDL_DestroyWindow( window );
    SDL_Quit();
    return 0;
}

OpenGL and GL3W support issues

There is something strange happening with gl3w's gl3wIsSupported function. When I call gl3wIsSupported(4, 0) it returns false, meaning OpenGL 4.0 isn't supported. However, when I call glGetString(GL_VERSION) it says the OpenGL version is 4.0.
Does this mean I can use OpenGL 4.0 functions?
I'm using gl3w in C++ with Visual Studio 2017.
#include <GL/gl3w.h>
#include <GLFW/glfw3.h>

int main(int argc, char** argv)
{
    if (!glfwInit()) {
        FATAL_ERROR("Failed to initialise GLFW");
    }
    glfwSetErrorCallback(glfwErrorCallback);

    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);
    GLFWwindow* window = glfwCreateWindow(640, 480, "OpenGL", nullptr, nullptr);
    // If I put glfwMakeContextCurrent here, gl3wInit fails
    //glfwMakeContextCurrent(window);
    if (!window) {
        glfwTerminate();
        FATAL_ERROR("Window creation failed");
    }

    if (!gl3wInit()) {} // handle that (note: gl3wInit returns 0 on success, so this check is inverted)
    glfwMakeContextCurrent(window);

    bool support = gl3wIsSupported(4, 0);                        // returns false
    const char* version = (const char*)glGetString(GL_VERSION); // returns "4.0.0"
}
You have to make a GL context current before you call gl3wInit() or regular OpenGL functions; otherwise they won't do anything useful.
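A minimal sketch of the corrected ordering (reusing the FATAL_ERROR helper from the question; note that gl3wInit() returns 0 on success):

GLFWwindow* window = glfwCreateWindow(640, 480, "OpenGL", nullptr, nullptr);
if (!window) {
    glfwTerminate();
    FATAL_ERROR("Window creation failed");
}

// Make the context current *before* loading the GL function pointers.
glfwMakeContextCurrent(window);

if (gl3wInit()) { // nonzero means failure
    glfwTerminate();
    FATAL_ERROR("Failed to initialise gl3w");
}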
In the OpenGL wiki you can read:
The GL3W library focuses on the core profile of OpenGL 3 and 4. It only loads the core entrypoints for these OpenGL versions. It supports Windows, Mac OS X, Linux, and FreeBSD.
Note: GL3W loads core OpenGL only by default. All OpenGL extensions will be loaded if the --ext flag is specified to gl3w_gen.py.
And this is confirmed looking inside the code:
int gl3wIsSupported(int major, int minor)
{
    if (major < 3) // <<<<=========== SEE THIS
        return 0;
    if (version.major == major)
        return version.minor >= minor;
    return version.major >= major;
}
You are asking glfwWindowHint for an old 2.0 version. Thus, gl3wIsSupported will return false and gl3wInit will return GL3W_ERROR_OPENGL_VERSION.
As for glGetString(GL_VERSION) returning "4.0": it means that, yes, the context you actually got is a 4.0 context and you can use that 4.0 version. Ask for it explicitly with glfwWindowHint.
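For example, a sketch of requesting a 4.0 context up front:

glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE); // gl3w targets the core profile
GLFWwindow* window = glfwCreateWindow(640, 480, "OpenGL", nullptr, nullptr);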
I fixed it by switching over to GLAD instead:
if (!glfwInit()) {
    FATAL_ERROR("Failed to initialise GLFW");
}
glfwSetErrorCallback(glfwErrorCallback);

glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
GLFWwindow* window = glfwCreateWindow(640, 480, "OpenGL", nullptr, nullptr);
if (!window) {
    glfwTerminate();
    FATAL_ERROR("Window creation failed");
}
glfwMakeContextCurrent(window);

if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress)) {
    glfwDestroyWindow(window);
    glfwTerminate();
    FATAL_ERROR("Failed to initialise OpenGL context");
}

PRINT("OpenGL Version: " << GLVersion.major << "." << GLVersion.minor);