EGL vs OpenGL context - C++

I use the BGFX framework for rendering in an application. I'm on Linux with an NVIDIA graphics card, and the BGFX build I use has OpenGL as its backend (I don't want to switch to the Vulkan backend).
Everything worked fine, but one new feature requires me to use EGL. The first thing I do in main is set EGL to use OpenGL as the rendering API:
if (not eglBindAPI(EGL_OPENGL_API) || (eglGetError() != EGL_SUCCESS))
//error handling
It works well.
Then I create an X11 window and call eglGetDisplay, eglInitialize, and eglChooseConfig; all of them return without any error.
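In short, that sequence looks like this (a sketch with placeholder names such as x11Display; error checking omitted here, the full source is in the update at the bottom):
EGLDisplay eglDisplay = eglGetDisplay((EGLNativeDisplayType)x11Display);

EGLint major = 0, minor = 0;
eglInitialize(eglDisplay, &major, &minor);

static constexpr EGLint cfgAttr[]{
    EGL_SURFACE_TYPE,    EGL_WINDOW_BIT,
    EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT,
    EGL_NONE
};
EGLConfig eglConfig{};
EGLint numConfigs = 0;
eglChooseConfig(eglDisplay, cfgAttr, &eglConfig, 1, &numConfigs);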
Then I call bgfx::init, and it also completes without any error.
At this point I have an initialized BGFX (using the OpenGL backend) and a current OpenGL context (created by BGFX):
std::cout << "GL Cont: " << glXGetCurrentContext() << std::endl; // Valid pointer
std::cout << "EGL Cont: " << eglGetCurrentContext() << std::endl; // 0x0
std::cout << "BGFX Renderer: " << bgfx::getRendererType() << std::endl; // 8 - OpenGL
Then I would like to execute the new EGL work for the feature on a different thread (I call eglBindAPI on the new thread as well):
EGLContext globalEglContext{};
{
static constexpr EGLint contextAttr[]{
EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE
};
globalEglContext = eglCreateContext(eglDisplay, eglConfig, EGL_NO_CONTEXT, contextAttr);
if (EGL_NO_CONTEXT == globalEglContext)
{
//error handling
}
}
if (!eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, globalEglContext))
{
printf("Error on eglMakeCurrent (error: 0x%x)", eglGetError());
}
The context creation is fine, but the eglMakeCurrent call returns false, yet the error code is 0x3000 (EGL_SUCCESS):
Error on eglMakeCurrent (error: 0x3000)
I cannot ignore the failure, because the next EGL operation fails as well, so it is a real error.
If I execute the very same context creation code on the main thread I get:
Error on eglMakeCurrent (error: 0x3002)
Checking 0x3002 (EGL_BAD_ACCESS) in the manual doesn't explain my case.
If I create and make my EGL context current on the main thread before initializing BGFX, and I add the following X11 error handler:
XSetErrorHandler(+[](Display *display, XErrorEvent *error)
{
char buf[255];
XGetErrorText(display, error->error_code, buf, 255);
printf("X11 error: %s", buf);
return 1;
});
Then the context creation and making it current work well, but during BGFX init I get the following error message:
X11 error: GLXBadDrawableX11 error: GLXBadDrawableX11
I have two questions:
1. Is it possible that EGL and OpenGL (GLX) contexts cannot be used at the same time? (On one thread I would have a current OpenGL context while on another thread an EGL context; a sketch of the intended setup is below.)
2. If it is not possible to use OpenGL and EGL contexts at the same time, not even on different threads, then how could I use EGL features while continuing to use OpenGL as the rendering backend?
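For clarity, this is the setup I have in mind; a minimal sketch, assuming eglDisplay and eglConfig are already initialized and the GLX context created by BGFX stays current on the render thread (not compilable as-is):
#include <thread>

// Render thread: BGFX keeps its GLX context current and keeps rendering.
// Worker thread: bind the OpenGL API and use a separate EGL context.
std::thread worker([&] {
    eglBindAPI(EGL_OPENGL_API);
    static constexpr EGLint contextAttr[]{ EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE };
    EGLContext ctx = eglCreateContext(eglDisplay, eglConfig, EGL_NO_CONTEXT, contextAttr);
    // This is the call that fails for me:
    eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, ctx);
    // ... EGL work for the new feature ...
});
worker.join();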
UPDATE:
I created a test app that creates and makes current a GLX context, then creates and tries to make current an EGL context, and the latter fails.
Does this mean that EGL and OpenGL (GLX) contexts cannot be used at the same time?
The full source code (main.cpp):
#include <iostream>
#include <assert.h>
#include <thread>
#include <chrono>
#include <future>
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GL/gl.h>
#include <GL/glx.h>
#include <SDL2/SDL.h>
#include <SDL2/SDL_syswm.h>
int main()
{
if (not eglBindAPI(EGL_OPENGL_API) || (eglGetError() != EGL_SUCCESS))
{
printf("Could not bind EGL ES API (error: 0x%0x)\n", eglGetError());
return -1;
}
XSetErrorHandler(+[](Display *display, XErrorEvent *error)
{
char buf[255];
XGetErrorText(display, error->error_code, buf, 255);
printf("X11 error: %s\n", buf);
return 1;
});
//
// WINDOW
//
uint32_t flags = SDL_WINDOW_RESIZABLE;
const auto sdlWindow = SDL_CreateWindow("win", 0, 0, 640, 480, flags);
SDL_ShowWindow(sdlWindow);
SDL_SysWMinfo wmi;
SDL_VERSION(&wmi.version);
if (!SDL_GetWindowWMInfo(sdlWindow, &wmi))
{
return -1;
}
auto display = wmi.info.x11.display;
//
// EGL INIT
//
void *eglConfig{};
void *eglDisplay{};
void *eglSurface{};
// EGL init
{
// Get EGL display
eglDisplay = eglGetDisplay((EGLNativeDisplayType)display);
if (eglDisplay == EGL_NO_DISPLAY)
{
printf("Could not create EGLDisplay (error: 0x%0x)\n", eglGetError());
return -1;
}
// Init EGL display
{
EGLint major;
EGLint minor;
if (!eglInitialize(eglDisplay, &major, &minor))
{
printf("Failed initializing EGL (error: 0x%0x)\n", eglGetError());
return -1;
}
else
{
printf("EGL initialized (Version: %d.%d)\n", major, minor);
}
}
// Choose EGL config
{
static constexpr EGLint cfgAttr[]{
EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT,
EGL_RED_SIZE, 1,
EGL_GREEN_SIZE, 1,
EGL_BLUE_SIZE, 1,
EGL_ALPHA_SIZE, 1,
EGL_DEPTH_SIZE, 1,
EGL_NONE
};
EGLint numConfigs{0};
if (!eglChooseConfig(eglDisplay, cfgAttr, &eglConfig, 1, &numConfigs))
{
printf("Failed on eglChooseConfig (error: 0x%0x)\n", eglGetError());
return -1;
}
}
// Create EGL surface
eglSurface = eglCreateWindowSurface(eglDisplay, eglConfig, wmi.info.x11.window, nullptr);
if(eglSurface == EGL_NO_SURFACE)
{
printf("Could not create EGLSurface (error: 0x%0x)\n", eglGetError());
return -1;
}
}
//
// OpenGL context
//
const auto screen = DefaultScreenOfDisplay(display);
const auto screenId = DefaultScreen(display);
static GLint glxAttribs[] = {
GLX_RGBA,
GLX_DOUBLEBUFFER,
GLX_DEPTH_SIZE, 24,
GLX_STENCIL_SIZE, 8,
GLX_RED_SIZE, 8,
GLX_GREEN_SIZE, 8,
GLX_BLUE_SIZE, 8,
GLX_SAMPLE_BUFFERS, 0,
GLX_SAMPLES, 0,
None
};
XVisualInfo* visual = glXChooseVisual(display, screenId, glxAttribs);
if (visual == 0)
{
printf("Could not create correct visual window.\n");
return -1;
}
GLXContext context = glXCreateContext(display, visual, NULL, GL_TRUE);
if( !glXMakeContextCurrent(display, None, None, context))
{
printf("Could not make context current.\n");
return -1;
}
std::cout << "GL Cont: " << glXGetCurrentContext() << std::endl;
std::cout << "EGL Cont: " << eglGetCurrentContext() << std::endl;
/*
// Uncomment this and EGL context creation works
if( !glXMakeContextCurrent(display, None, None, NULL))
{
printf("Could not make context current.\n");
return -1;
}
std::cout << "GL Cont: " << glXGetCurrentContext() << std::endl;
std::cout << "EGL Cont: " << eglGetCurrentContext() << std::endl;
*/
//
// EGL CONTEXT
//
auto launchPolicy = std::launch::deferred; // change it to std::launch::async to create EGL context on a thread
auto res = std::async(launchPolicy, [&](){
void *globalEglContext;
{
static constexpr EGLint contextAttr[]{
EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE
};
globalEglContext = eglCreateContext(eglDisplay, eglConfig, EGL_NO_CONTEXT, contextAttr);
if (EGL_NO_CONTEXT == globalEglContext)
{
printf("Error creating EGL context (error: 0x%x)\n", eglGetError());
exit(-2);
}
}
// fails with 0x3000 (EGL_SUCCESS) on a different thread.
// fails with 0x3002 (EGL_BAD_ACCESS) on the main thread.
if (!eglMakeCurrent(eglDisplay, eglSurface, eglSurface, globalEglContext))
{
printf("Error on eglMakeCurrent (error: 0x%x)\n", eglGetError());
exit(-3);
}
return 0;
});
res.wait();
std::cout << "GL Cont: " << glXGetCurrentContext() << std::endl;
std::cout << "EGL Cont: " << eglGetCurrentContext() << std::endl;
}
CMakeLists.txt:
cmake_minimum_required(VERSION 3.5)
project(EGLTest LANGUAGES CXX)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
find_package(OpenGL REQUIRED COMPONENTS EGL)
find_package(PkgConfig REQUIRED)
pkg_check_modules(X11 REQUIRED x11)
pkg_check_modules(SDL2 REQUIRED sdl2)
add_executable(${PROJECT_NAME} main.cpp)
target_include_directories(
${PROJECT_NAME}
SYSTEM
PUBLIC ${OPENGL_EGL_INCLUDE_DIRS}
PUBLIC ${SDL2_INCLUDE_DIRS}
)
target_link_libraries(
${PROJECT_NAME}
OpenGL::EGL
${SDL2_LIBRARIES}
)
UPDATE 2:
My config:
Kubuntu 22.04 LTS 5.15.0-52-generic
Operating System: Ubuntu 22.04
KDE Plasma Version: 5.24.6
KDE Frameworks Version: 5.98.0
Qt Version: 5.15.3
Kernel Version: 5.15.0-52-generic (64-bit)
Graphics Platform: X11
Processors: 16 × 11th Gen Intel® Core™ i7-11800H @ 2.30GHz
NVIDIA-SMI 470.141.03 Driver Version: 470.141.03 CUDA Version: 11.4
OpenGL vendor string: NVIDIA Corporation
OpenGL renderer string: NVIDIA GeForce RTX 3050 Ti Laptop GPU/PCIe/SSE2
OpenGL core profile version string: 4.6.0 NVIDIA 470.141.03
OpenGL core profile shading language version string: 4.60 NVIDIA
OpenGL ES profile version string: OpenGL ES 3.2 NVIDIA 470.141.03
OpenGL ES profile shading language version string: OpenGL ES GLSL ES 3.20
UPDATE 3:
adam#pc:~/git/bgfx_test/build$ ldd BgfxTest | grep GL
libEGL.so.1 => /lib/x86_64-linux-gnu/libEGL.so.1 (0x00007f32b95dd000)
libGLX.so.0 => /lib/x86_64-linux-gnu/libGLX.so.0 (0x00007f32b95a9000)
libGLdispatch.so.0 => /lib/x86_64-linux-gnu/libGLdispatch.so.0 (0x00007f32b8d9d000)

Related

GLEW error (1): Missing GL version (GLFW)

I'm trying to use OpenGL with GLEW and GLFW. However, it appears that I cannot call glewInit() successfully, and I get the following error message: GLEW error (1): Missing GL version. Similar questions on Stack Overflow haven't solved my problem.
When building my project, I am including the glew.c file and the directory where glew.h is.
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>

// Window dimensions; SCR_WIDTH and SCR_HEIGHT are defined elsewhere in my project, values assumed here
const unsigned int SCR_WIDTH = 800;
const unsigned int SCR_HEIGHT = 600;

int main()
{
glfwInit();
glfwWindowHint(GLFW_SAMPLES, 0);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow* window = glfwCreateWindow(SCR_WIDTH, SCR_HEIGHT, "Testing", nullptr, nullptr);
glfwMakeContextCurrent(window);
glewExperimental = true;
GLenum glewErr = glewInit();
if (glewErr != GLEW_OK)
{
std::cerr << "GLEW error (" << glewErr << "): " << glewGetErrorString(glewErr) << std::endl;
glfwTerminate();
return -1;
}
// Next, I paint some stuff using OpenGL ...
}
Everything looks ok in the code. Furthermore, for some reason, if I use GLAD instead of GLEW, my code works just fine.
System: Ubuntu 18.04.5 LTS.
OGL version: 3.3.0 NVIDIA 340.108
OGL vendor: NVIDIA Corporation
Renderer: GeForce 820M/PCIe/SSE2
GLSL version: 3.30 NVIDIA via Cg compiler
UPDATE:
Diving deep into the glew.c code I found where the error pops up, though I still can't understand why. Here is the piece of code:
static GLenum GLEWAPIENTRY glewContextInit ()
{
PFNGLGETSTRINGPROC getString;
const GLubyte* s;
GLuint dot;
#ifdef _WIN32
getString = glGetString; // Not executed, I'm on Ubuntu
#else
getString = (PFNGLGETSTRINGPROC) glewGetProcAddress((const GLubyte*)"glGetString");
if (!getString)
return GLEW_ERROR_NO_GL_VERSION;
#endif
/* query opengl version */
s = getString(GL_VERSION); // <<< ERROR: s gets a null pointer
dot = _glewStrCLen(s, '.');
if (dot == 0)
return GLEW_ERROR_NO_GL_VERSION; // <<< Function escapes here
// ... more statement down here
}
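For what it's worth, here is a minimal diagnostic sketch I can drop in right before glewInit() to confirm that a context is really current on the calling thread (a hypothetical helper, not part of my actual project):
#include <cstdio>
#include <GLFW/glfw3.h>   // also pulls in the system GL header

// Prints which GLFW window's context is current and what glGetString sees.
void dumpContextState(GLFWwindow* expected)
{
    // Returns the window whose context is current on this thread, or nullptr if none.
    GLFWwindow* current = glfwGetCurrentContext();
    std::printf("current context window: %p (expected %p)\n",
                static_cast<void*>(current), static_cast<void*>(expected));

    // glGetString needs a current context; with none it returns NULL,
    // which is exactly what glewContextInit() runs into above.
    const GLubyte* version = glGetString(GL_VERSION);
    std::printf("GL_VERSION: %s\n",
                version ? reinterpret_cast<const char*>(version) : "(null)");
}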

OpenGL failed Creating basic Window

I'm trying to compile a basic OpenGL program on Ubuntu, but I get a runtime error: the window I created crashes!
Here is my code:
#include<iostream>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
const GLint WIDTH = 800 , HEIGH = 600;
int main ()
{
if (!glfwInit())
{
std::cerr << "GLFW initialization failed" << std::endl ;
glfwTerminate();
return 1 ;
}
//setup GLFW window properties
//OpenGL version 3.3
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR,3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR,3);
glfwWindowHint(GLFW_OPENGL_PROFILE,GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT,GL_TRUE);
GLFWwindow *main_window = glfwCreateWindow(WIDTH,HEIGH,"TestWindow",NULL,NULL);
if(!main_window)
{
std::cerr << "GLFW creation failed " << std::endl;
glfwTerminate();
return 1 ;
}
int bufferWidth,bufferHeight ;
glfwGetFramebufferSize(main_window,&bufferWidth,&bufferHeight);
glfwMakeContextCurrent(main_window);
glewExperimental = GL_TRUE ;
if(glewInit() != GLEW_OK)
{
glfwDestroyWindow(main_window);
glfwTerminate();
return 1 ;
}
glViewport(0,0,bufferWidth,bufferHeight);
// loop until window closed
while(!glfwWindowShouldClose(main_window));
{
glfwPollEvents();
glClearColor(1.0f,0.0f,0.0f,1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glfwSwapBuffers(main_window);
}
return 0 ;
}
This is the output I get:
I don't know if it's related to my graphics card:
description: VGA compatible controller
product: GK107GLM [Quadro K1100M]
vendor: NVIDIA Corporation
physical id: 0
bus info: pci#0000:01:00.0
version: a1
width: 64 bits
clock: 33MHz
capabilities: vga_controller bus_master cap_list rom
configuration: driver=nouveau latency=0
resources: irq:36 memory:cf000000-cfffffff memory:70000000-7fffffff memory:80000000-81ffffff ioport:5000(size=128) memory:c0000-dffff
Any explanation?

OpenGL and GL3W support issues

There is something strange happening with gl3w's isSupported function. When I call isSupported(4, 0) it returns false, meaning OpenGL 4.0 isn't supported. However, when I call glGetString(GL_VERSION) it says OpenGL version 4.0.
Does this mean I can use OpenGL 4.0 functions?
I'm using gl3w with C++ and Visual Studio 2017.
#include <GL/gl3w.h>
#include <GLFW/glfw3.h>
int main(int argc, char** argv){
if(!glfwInit()) {
FATAL_ERROR("Failed to initialise GLFW");
}
glfwSetErrorCallback(glfwErrorCallback);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);
GLFWwindow* window = glfwCreateWindow(640, 480, "OpenGL", nullptr, nullptr);
//If i put glfwMakeContextCurrent here gl3wInit fails
//glfwMakeContextCurrent(window);
if (!window) {
glfwTerminate();
FATAL_ERROR("Window creation failed");
}
if(!gl3wInit()) {} // handle that
glfwMakeContextCurrent(window);
bool support = gl3wIsSupported(4, 0); // returns false
const char* version = reinterpret_cast<const char*>(glGetString(GL_VERSION)); // returns "4.0.0"
}
You have to make a GL context current before you call gl3wInit() or regular OpenGL functions, otherwise they won't do anything useful.
In the OpenGL wiki you can read:
The GL3W library focuses on the core profile of OpenGL 3 and 4. It only loads the core entrypoints for these OpenGL versions. It supports Windows, Mac OS X, Linux, and FreeBSD.
Note: GL3W loads core OpenGL only by default. All OpenGL extensions will be loaded if the --ext flag is specified to gl3w_gen.py.
And this is confirmed looking inside the code:
int gl3wIsSupported(int major, int minor)
{
if (major < 3) // <<<<=========== SEE THIS
return 0;
if (version.major == major)
return version.minor >= minor;
return version.major >= major;
}
You are asking with glfwWindowHint for an old 2.0 version. Thus, gl3wIsSupported will return false and gl3wInit will return GL3W_ERROR_OPENGL_VERSION.
As for glGetString(GL_VERSION) returning "4.0": yes, it means you can use that 4.0 version. Ask for it with glfwWindowHint, as sketched below.
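A minimal sketch of the corrected order, assuming window creation succeeds on your machine (error handling trimmed):
#include <GL/gl3w.h>
#include <GLFW/glfw3.h>
#include <cstdio>

int main()
{
    if (!glfwInit())
        return -1;

    // Ask for the version you actually want to use.
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    GLFWwindow* window = glfwCreateWindow(640, 480, "OpenGL", nullptr, nullptr);
    if (!window) { glfwTerminate(); return -1; }

    // The context must be current *before* gl3wInit().
    glfwMakeContextCurrent(window);

    if (gl3wInit()) {                    // non-zero means failure
        std::fprintf(stderr, "gl3wInit failed\n");
        glfwTerminate();
        return -1;
    }
    std::printf("4.0 supported: %d\n", gl3wIsSupported(4, 0)); // now reports 1

    glfwDestroyWindow(window);
    glfwTerminate();
    return 0;
}
The only substantive differences from the question's code are the order (make the context current first) and the requested version.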
I fixed it by switching over to glad instead:
if (!glfwInit()) {
FATAL_ERROR("Failed to initialise GLFW");
}
glfwSetErrorCallback(glfwErrorCallback);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
GLFWwindow* window = glfwCreateWindow(640, 480, "OpenGL", nullptr, nullptr);
if (!window) {
glfwTerminate();
FATAL_ERROR("Window creation failed");
}
glfwMakeContextCurrent(window);
if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress)) {
glfwDestroyWindow(window);
glfwTerminate();
FATAL_ERROR("Failed to initialise OpenGL context");
}
PRINT("OpenGL Version: " << GLVersion.major << "." << GLVersion.minor);

glewInit() fails with "Missing GL version", SDL2 OpenGL context, cygwin compiler

The following program creates a window that does nothing except close when you press Esc. When I compile it with Cygwin, there are no errors. The GLEW I use is from Cygwin Ports, and the SDL2 is version 2.0.3, from their website's SDL2-devel-2.0.3-mingw.tar.gz download. I have SDL2.dll in the directory of the compiled executable.
It links with: -lSDL2 -lSDL2main -lGLEW -lGLU -lGL -lSDL2 -lSDL2main -lGLEW -lGLU -lGL (everything twice, to ensure it is all linked).
Also compiled with: -std=c++11
On my computer, the following program prints out:
OpenGL Vendor: (null)
OpenGL Renderer: (null)
OpenGL Shading Language Version: (null)
OpenGL Extensions: (null)
Error initializing GLEW! Missing GL version
The program appears to work otherwise. The main problem is that if I try to call, for example, glGenVertexArrays, the program crashes with STATUS_ACCESS_VIOLATION. (See the crashing code here.) I think this has something to do with GLEW's "Missing GL version" error.
#include <cstdio>
#include <chrono>
#include <thread>
#include <SDL2/SDL.h>
#include <GL/glew.h>
#include <SDL2/SDL_opengl.h>
#include <GL/glu.h>
const int width = 1000;
const int height = 500;
bool Running = true;
#undef main
int main (int argc, char *argv[]) {
FILE* cdebug = fopen("cdebug.txt", "w");
if (SDL_Init(SDL_INIT_VIDEO) < 0) {
fprintf(cdebug, "SDL could not initialize! SDL Error: %s\n", SDL_GetError()); fflush(cdebug);
}
#define setAttr(attr, value) \
if (SDL_GL_SetAttribute(attr, value) < 0) { \
fprintf(cdebug, "SDL failed to set %s to %s, SDL Error: %s\n", #attr, #value, SDL_GetError()); fflush(cdebug);\
}
setAttr(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
setAttr(SDL_GL_CONTEXT_MINOR_VERSION, 3);
setAttr(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
setAttr(SDL_GL_RED_SIZE, 8);
setAttr(SDL_GL_GREEN_SIZE, 8);
setAttr(SDL_GL_BLUE_SIZE, 8);
setAttr(SDL_GL_DEPTH_SIZE, 24);
setAttr(SDL_GL_DOUBLEBUFFER, 1);
#undef setAttr
/*
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
*/
SDL_Window *window = SDL_CreateWindow(
"test",
SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
640, 480,
SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE
);
if (window == NULL) {
fprintf(cdebug, "Window could not be created! SDL Error: %s\n", SDL_GetError()); fflush(cdebug);
}
SDL_GLContext GLContext = SDL_GL_CreateContext(window);
if (GLContext == NULL) {
fprintf(cdebug, "OpenGL context could not be created! SDL Error: %s\n", SDL_GetError()); fflush(cdebug);
}
if (SDL_GL_MakeCurrent(window, GLContext) < 0) {
fprintf(cdebug, "OpenGL context could not be made current! SDL Error: %s\n", SDL_GetError()); fflush(cdebug);
}
fprintf(cdebug, "OpenGL Vendor: %s\n", glGetString(GL_VENDOR));
fprintf(cdebug, "OpenGL Renderer: %s\n", glGetString(GL_RENDERER));
fprintf(cdebug, "OpenGL Shading Language Version: %s\n", glGetString(GL_SHADING_LANGUAGE_VERSION));
fprintf(cdebug, "OpenGL Extensions: %s\n", glGetString(GL_EXTENSIONS));
fflush(cdebug);
glewExperimental = GL_TRUE;
{
GLenum glewError = glewInit();
if (glewError != GLEW_OK) {
fprintf(cdebug, "Error initializing GLEW! %s\n", glewGetErrorString(glewError)); fflush(cdebug);
}
}
SDL_Event event;
while (Running) {
while (SDL_PollEvent(&event)) {
switch (event.type) {
case SDL_KEYUP: {
switch (event.key.keysym.scancode) {
case SDL_SCANCODE_ESCAPE:
Running = false;
break;
}
break;
}
}
}
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
SDL_GL_SwapWindow(window);
std::this_thread::sleep_for(std::chrono::milliseconds(50));
}
SDL_GL_DeleteContext(GLContext);
SDL_DestroyWindow(window);
window = NULL;
SDL_Quit();
return 0;
}
You are mixing cygwin and mingw in ways in which you shouldn't.
If you use cygwin's toolchain and -lGL and so on, you link against cygwin's OpenGL, which is not the native OpenGL lib on Windows, but the one provided by cygwin's X server, implementing the GLX protocol.
The mingw version of SDL will use the native GL lib (opengl32.dll) on Windows, via the wgl API. So SDL might even create a context for you, but the GL functions your program is calling belong to a completely different GL implementation, for which your program never created a GL context.
The solution is to stick with one or the other: completely use cygwin, with a cygwin version of SDL and a cygwin X server. However, that is not the path I would recommend; I don't know whether that would even get you any HW acceleration at all.
The more useful solution would be to not use cygwin but mingw for the whole project, with a mingw version of GLEW. That will result in a completely native Windows binary which will use the native OpenGL library with all features provided by the driver, and it will not require cygwin's dlls and especially not cygwin's X server.
I managed to get things working in a weird way.
I am using a self-compiled version of SDL2, but the SDL2-devel-2.0.3-mingw.tar.gz provided by the SDL website seems to work as well, as does a combination of the two (such as the mingw version's libs with a self-compiled .dll).
For GLEW, I am using my own compiled version. To compile it, I used their website's source glew-1.11.0.zip and extracted it. Then I edited glew-1.11.0/Makefile, changing line 24 to SYSTEM = cygming. Then in glew-1.11.0/config/Makefile.cygming, on lines 7 and 8, I removed the -mno-cygwin flag (so the lines are CC := gcc and LD := gcc) and added -D_WIN32 to line 10 (so the line becomes CFLAGS.SO = -DGLEW_BUILD -D_WIN32). Then in glew-1.11.0, I ran make all and let it compile. After that, I copied glew-1.11.0/include/GL to my includes directory, copied glew-1.11.0/lib/libglew32.dll.a to my libs folder, and copied glew-1.11.0/lib/glew32.dll to my .exe's folder. Then, to get it to not produce a linker error, I had to place a #define _WIN32 before my #include <GL/glew.h>.
To link everything, I managed to compile it with a minimum of -lSDL2 -lSDL2main -lglew32.dll -lopengl32.

Creating OpenGL 3.3 Context with GLFW in Mac OS X 10.9

I have the following code:
void error_callback(int error, const char* description)
{
fputs(description, stderr);
}
int main( void )
{
// Initialise GLFW
if( !glfwInit() )
{
fprintf( stderr, "Failed to initialize GLFW\n" );
return -1;
}
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
//glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
// Open a window and create its OpenGL context
glfwSetErrorCallback(error_callback);
GLFWwindow* window = glfwCreateWindow( 1024, 768, "Tutorial 16 - Shadows", NULL, NULL);
if( window == NULL ){
//fprintf( stderr, "Failed to open GLFW window. If you have an Intel GPU, they are not 3.3 compatible. Try the 2.1 version of the tutorials.\n" );
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
// Initialize GLEW
GLenum err;
glewExperimental = GL_TRUE; // Needed for core profile
if ((err = glewInit()) != GLEW_OK) {
std::cout << glewGetErrorString(err) << std::endl;
return -1;
}
...
}
The problem is that I'm receiving the following message: https://github.com/glfw/glfw/blob/master/src/nsgl_context.m#L101
And, indeed, GLFW won't give me a OpenGL 3+ context without setting the forward-compatibility flag (in Mac OS X).
Why is that? Is there any way to get a OpenGL 3+ context in Mac OS X 10.9 without forward-compatibility? Is it a limitation of OpenGL implementation for OS X or a problem of GLFW?
This is actually the correct behavior, as defined in the OpenGL Programming Guide for Mac.
Mac OS X simply does not support the compatibility profile for OpenGL 3.x/4.x, so you must request a core (or forward-compatible) context. This implies that you will not be able to use any deprecated functions when programming against OpenGL 3.x/4.x on a Mac.
It might be worth making a feature request in the GLFW issue tracker to set the core profile flag implicitly when requesting a 3.x/4.x context on Mac OS X.
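For reference, a minimal sketch of the hints that do produce a 3.3 context on OS X; the forward-compatible hint is exactly the line the question commented out:
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
// Required on OS X for any 3.x/4.x context (core profile only, no deprecated functions):
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);

window = glfwCreateWindow( 1024, 768, "Tutorial 16 - Shadows", NULL, NULL);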