SetPixelFormat fails when using WGL_SAMPLE_BUFFERS_ARB / WGL_SAMPLES_ARB - c++

I decided to add MSAA support to my application, and therefore I checked what needs to be done. The examples I found always refer to WGL_SAMPLE_BUFFERS_ARB / WGL_SAMPLES_ARB in the pixel format attribute list to do that. I am using GLEW right now:
int DesiredColorBits = 32;
int DesiredStencilBits = 0;
int DesiredDepthBits = 24;
int MajorVersion=3;
int MinorVersion=3;
PIXELFORMATDESCRIPTOR temppfd =
{
sizeof(PIXELFORMATDESCRIPTOR),
1,
PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
PFD_TYPE_RGBA,
DesiredColorBits,
0, 0, 0, 0, 0, 0,
0, 0,
0, 0, 0, 0, 0,
DesiredDepthBits,
0,//DesiredStencilBits,
0,
PFD_MAIN_PLANE,
0,
0, 0, 0
};
HDC TemphDC;
TemphDC = GetDC( hWnd );
int nPixelFormat = ChoosePixelFormat( TemphDC, &temppfd );
check(nPixelFormat);
verify(SetPixelFormat( TemphDC, nPixelFormat, &temppfd ));
// oldstyle context to init glew.
HGLRC tempContext = wglCreateContext(TemphDC);
wglMakeCurrent(TemphDC, tempContext);
//init glew
glewExperimental = GL_TRUE;
GLenum err = glewInit();
if (GLEW_OK != err)
errorprintf(TEXT("Error: Init glew failed: %s"), glewGetErrorString(err));
else debugprintf( TEXT("Glew successfully initialized."));
ReleaseDC(hWnd, TemphDC);
//Now init pure OpenGL >= 3.3 context.
if (WGLEW_ARB_create_context && WGLEW_ARB_pixel_format)
{
wglMakeCurrent(NULL, NULL);
wglDeleteContext(tempContext);
PIXELFORMATDESCRIPTOR pfd;
memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR));
pfd.nSize = sizeof(PIXELFORMATDESCRIPTOR);
pfd.nVersion = 1;
pfd.dwFlags = PFD_DOUBLEBUFFER | PFD_SUPPORT_OPENGL | PFD_DRAW_TO_WINDOW;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = DesiredColorBits;
pfd.cDepthBits = DesiredDepthBits;
pfd.iLayerType = PFD_MAIN_PLANE;
int iPixelFormatAttribList[30];
iPixelFormatAttribList[0] = WGL_DRAW_TO_WINDOW_ARB;
iPixelFormatAttribList[1] = GL_TRUE;
iPixelFormatAttribList[2] = WGL_SUPPORT_OPENGL_ARB;
iPixelFormatAttribList[3] = GL_TRUE;
iPixelFormatAttribList[4] = WGL_DOUBLE_BUFFER_ARB;
iPixelFormatAttribList[5] = GL_TRUE;
iPixelFormatAttribList[6] = WGL_PIXEL_TYPE_ARB;
iPixelFormatAttribList[7] = WGL_TYPE_RGBA_ARB;
iPixelFormatAttribList[8] = WGL_COLOR_BITS_ARB;
iPixelFormatAttribList[9] = DesiredColorBits;
iPixelFormatAttribList[10] = WGL_DEPTH_BITS_ARB;
iPixelFormatAttribList[11] = DesiredDepthBits;
iPixelFormatAttribList[12] = WGL_STENCIL_BITS_ARB;
iPixelFormatAttribList[13] = 0;
iPixelFormatAttribList[14] = WGL_SAMPLE_BUFFERS_ARB;
iPixelFormatAttribList[15] = GL_TRUE;
iPixelFormatAttribList[16] = WGL_SAMPLES_ARB;
iPixelFormatAttribList[17] = NumAASamples;
iPixelFormatAttribList[18] = 0;
int iPixelFormat, iNumFormats;
wglChoosePixelFormatARB(hDC, iPixelFormatAttribList, NULL, 1, &iPixelFormat, (UINT*)&iNumFormats);
if( iNumFormats == 0 )
{
debugprintf(TEXT("Couldn't support multisampling"));
}
else debugprintf(TEXT("Available formats: %i %i"),iNumFormats,iPixelFormat);
int iContextAttribs[] =
{
WGL_CONTEXT_MAJOR_VERSION_ARB, MajorVersion,
WGL_CONTEXT_MINOR_VERSION_ARB, MinorVersion,
WGL_CONTEXT_FLAGS_ARB, ContextFlags,
0 // End of attributes list
};
// pfd oldstyle crap...
debugprintf(TEXT("DesiredColorBits: %i"), DesiredColorBits);
debugprintf(TEXT("DesiredDepthBits: %i"), DesiredDepthBits);
if (!SetPixelFormat(hDC, iPixelFormat, &pfd))
{
debugprintf(TEXT("Setting PixelFormat %i failed!"), iPixelFormat);
iPixelFormat = ChoosePixelFormat(hDC, &pfd);
if (!SetPixelFormat(hDC, iPixelFormat, &pfd))
errorprintf(TEXT("Error: SetPixelFormat %i failed."),iPixelFormat);
}
hRC = wglCreateContextAttribsARB(hDC, 0, iContextAttribs);
}
else errorprintf(TEXT("Error: WGL_ARB_create_context or WGL_ARB_pixel_format not supported."));
if(hRC)
{
MakeCurrent();
debugprintf( TEXT("GL_VENDOR : %s"), glGetString(GL_VENDOR));
debugprintf( TEXT("GL_RENDERER : %s"), glGetString(GL_RENDERER));
debugprintf( TEXT("GL_VERSION : %s"), glGetString(GL_VERSION));
debugprintf( TEXT("GLEW Version : %s"), glewGetString(GLEW_VERSION));
int NumberOfAASamples = 0, NumberOfSampleBuffers = 0;
glGetIntegerv(GL_SAMPLE_BUFFERS, &NumberOfSampleBuffers);
glGetIntegerv(GL_SAMPLES, &NumberOfAASamples);
debugprintf(TEXT("SampleBuffers: %i, NumAASamples: (%i/%i)"), NumberOfSampleBuffers, NumberOfAASamples, NumAASamples);
int NumberOfExtensions = 0;
glGetIntegerv(GL_NUM_EXTENSIONS, &NumberOfExtensions);
for (int i = 0; i<NumberOfExtensions; i++)
{
FString ExtensionString = glGetStringi(GL_EXTENSIONS, i);
debugprintf(NAME_DevLoad, TEXT("GL_EXTENSIONS(%i) : %s"), i, ExtensionString);
}
debugprintf( TEXT("OpenGL %i.%i context initialized!"), MajorVersion,MinorVersion);
}
So far, so good: I get my temp context to init GLEW, it initializes correctly and is ready to use, and there are also no problems with SetPixelFormat - without sample buffers, at least.
Now, if I add WGL_SAMPLE_BUFFERS_ARB and WGL_SAMPLES_ARB, I still get a pixel format back from wglChoosePixelFormatARB (indeed, iNumFormats returns 32), but SetPixelFormat always fails with the pixel format returned from it.
I tried various color and depth bits, but no matter what I do, I can't seem to find a way to make this work. I also tried this on my NVIDIA and an Intel card; no AA preset is forced either.
Now, is there a different way to get this result? I take it that SetPixelFormat is "deprecated", partially even obsolete from what I read, yet I seem to need it here to enable MSAA, which is a rather annoying dependency.
Did I miss something?

I solved it by changing one thing: I created a completely standalone window for the GLEW init, like this:
LRESULT CALLBACK WndProc(HWND hWnd, UINT uiMsg, WPARAM wParam, LPARAM lParam)
{
switch(uiMsg)
{
case WM_CLOSE:
PostQuitMessage(0);
break;
default:
return DefWindowProc(hWnd, uiMsg, wParam, lParam);
}
return 0;
}
PIXELFORMATDESCRIPTOR temppfd =
{
sizeof(PIXELFORMATDESCRIPTOR),
1,
PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
PFD_TYPE_RGBA,
DesiredColorBits,
0, 0, 0, 0, 0, 0,
0, 0,
0, 0, 0, 0, 0,
DesiredDepthBits,
0,//DesiredStencilBits,
0,
PFD_MAIN_PLANE,
0,
0, 0, 0
};
DWORD Style = WS_OVERLAPPEDWINDOW | WS_CLIPSIBLINGS | WS_CLIPCHILDREN;
WNDCLASSEX WndClassEx;
memset(&WndClassEx, 0, sizeof(WNDCLASSEX));
WndClassEx.cbSize = sizeof(WNDCLASSEX);
WndClassEx.style = CS_OWNDC | CS_HREDRAW | CS_VREDRAW;
WndClassEx.lpfnWndProc = WndProc;
WndClassEx.hInstance = hInstance;
WndClassEx.hIcon = LoadIcon(NULL, IDI_APPLICATION);
WndClassEx.hIconSm = LoadIcon(NULL, IDI_APPLICATION);
WndClassEx.hCursor = LoadCursor(NULL, IDC_ARROW);
WndClassEx.lpszClassName = L"Win32OpenGLWindow";
if(RegisterClassEx(&WndClassEx) == 0)
{
debugprintf(TEXT("RegisterClassEx failed!"));
}
HWND TemphWnd = CreateWindowEx(WS_EX_APPWINDOW, WndClassEx.lpszClassName, L"InitWindow", Style, 0, 0, SizeX, SizeY, NULL, NULL, hInstance, NULL);
HDC TemphDC = GetDC( TemphWnd );
INT nPixelFormat = ChoosePixelFormat( TemphDC, &temppfd );
check(nPixelFormat);
verify(SetPixelFormat( TemphDC, nPixelFormat, &temppfd ));
// oldstyle context to init glew.
HGLRC tempContext = wglCreateContext(TemphDC);
wglMakeCurrent(TemphDC, tempContext);
//init glew
glewExperimental = GL_TRUE;
GLenum err = glewInit();
if (GLEW_OK != err)
errorprintf(TEXT("Error: Init glew failed: %s"),glewGetErrorString(err));
else debugprintf(TEXT("Glew successfully initialized."));
//Now init pure OpenGL >= 3.3 context.
if (WGLEW_ARB_create_context && WGLEW_ARB_pixel_format)
{
wglMakeCurrent(NULL, NULL);
wglDeleteContext(tempContext);
ReleaseDC(TemphWnd, TemphDC);
DestroyWindow(TemphWnd);
...
After destroying this temp window and then using the actual one for the game, as shown above, it was possible to use SetPixelFormat with the new options. Why it didn't cause any trouble like this when not adding WGL_SAMPLE_BUFFERS_ARB / WGL_SAMPLES_ARB I don't know; I assume that the pixel format of a window simply can't be changed once it has been set, no matter whether the hWnd, hDC, or context are destroyed or renewed in between. Many examples and tutorials on the web are not clear about that, and some indeed show it the wrong way.
In short: create a temp window, temp hWnd, and temp hDC, then init GLEW, destroy all the temporary stuff, and only then create the context you really want with the real window, hWnd, and hDC - otherwise you'll buy yourself some trouble.
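For reference, that order of operations condenses to something like the sketch below. It assumes windows.h, GL/glew.h, and GL/wglew.h are included, the "Win32OpenGLWindow" class from above is registered, and error checks are stripped so the sequence stays visible; treat it as an illustration, not drop-in code:
HGLRC CreateMSAAContext(HINSTANCE hInstance, HWND hRealWnd, int NumAASamples)
{
    // 1) Throwaway window + old-style context, used only to init GLEW.
    //    The pixel format "burned" here is on the temp window, not the real one.
    HWND TempWnd = CreateWindowEx(WS_EX_APPWINDOW, L"Win32OpenGLWindow", L"InitWindow",
        WS_OVERLAPPEDWINDOW, 0, 0, 64, 64, NULL, NULL, hInstance, NULL);
    HDC TempDC = GetDC(TempWnd);
    PIXELFORMATDESCRIPTOR pfd = { sizeof(PIXELFORMATDESCRIPTOR), 1,
        PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER, PFD_TYPE_RGBA, 32 };
    pfd.cDepthBits = 24;
    pfd.iLayerType = PFD_MAIN_PLANE;
    SetPixelFormat(TempDC, ChoosePixelFormat(TempDC, &pfd), &pfd);
    HGLRC TempRC = wglCreateContext(TempDC);
    wglMakeCurrent(TempDC, TempRC);
    glewExperimental = GL_TRUE;
    glewInit();
    // 2) Tear down ALL the temporary objects, including the window itself.
    wglMakeCurrent(NULL, NULL);
    wglDeleteContext(TempRC);
    ReleaseDC(TempWnd, TempDC);
    DestroyWindow(TempWnd);
    // 3) First (and only) SetPixelFormat on the real window, now with MSAA.
    HDC hDC = GetDC(hRealWnd);
    const int PixelAttribs[] =
    {
        WGL_DRAW_TO_WINDOW_ARB, GL_TRUE,
        WGL_SUPPORT_OPENGL_ARB, GL_TRUE,
        WGL_DOUBLE_BUFFER_ARB, GL_TRUE,
        WGL_PIXEL_TYPE_ARB, WGL_TYPE_RGBA_ARB,
        WGL_COLOR_BITS_ARB, 32,
        WGL_DEPTH_BITS_ARB, 24,
        WGL_SAMPLE_BUFFERS_ARB, GL_TRUE,
        WGL_SAMPLES_ARB, NumAASamples,
        0
    };
    int iPixelFormat = 0;
    UINT NumFormats = 0;
    wglChoosePixelFormatARB(hDC, PixelAttribs, NULL, 1, &iPixelFormat, &NumFormats);
    SetPixelFormat(hDC, iPixelFormat, &pfd);
    const int ContextAttribs[] =
    {
        WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
        WGL_CONTEXT_MINOR_VERSION_ARB, 3,
        0
    };
    return wglCreateContextAttribsARB(hDC, 0, ContextAttribs);
}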
Yes, I think it is a real mess to get a clean context in Windows.
Either way, I hope this will help others.

Related

Initializing OpenGL without libraries

Every tutorial I find online uses SDL or GLAD or other libraries to initialize OpenGL. Is there a way of initializing OpenGL with no extra libraries (like DirectX)?
The reason for that is that I am building a game engine, and I want to minimize the external libraries as much as possible and be able to handle more error messages myself.
Initializing OpenGL differs from one operating system to another; that's why people made these libraries, to ease other developers' work.
Some tutorials that will help you:
For Windows (WGL)
https://www.khronos.org/opengl/wiki/Creating_an_OpenGL_Context
http://www.rastertek.com/gl40tut03.html
For Linux (GLX)
https://www.khronos.org/opengl/wiki/Tutorial:_OpenGL_3.0_Context_Creation_(GLX)
There isn't much quality information about doing this. I found a great piece of code called windows_modern_opengl_context.c on GitHub by the user nickrolfe. I thought I'd share it here to make it easier for others to find. If you link -lopengl32 and make sure the -mwindows flag is set, it should compile fine with a modern C compiler. I'm using gcc -std=c11. If you want to use C++ you may have to edit the code to not use designated initializers when initializing the structs. I'm not sure.
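For reference, a MinGW/gcc build line for that file might look like this (gdi32 supplies ChoosePixelFormat/SwapBuffers; the file and output names are just placeholders, adjust them to your setup):
gcc -std=c11 -mwindows windows_modern_opengl_context.c -o opengl_window.exe -lopengl32 -lgdi32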
EDIT:
This code creates an empty window using the Win32 API with a modern OpenGL context (version 3.3). You will still need to load the appropriate GL extensions in order to call functions such as glCreateShader(...); I recommend using glad to do this. If you use glad as I said, you should use gladLoadGL() instead of gladLoadGLLoader() to load the extensions (see the short glad snippet after the listing).
// Sample code showing how to create a modern OpenGL window and rendering context on Win32.
#include <windows.h>
#include <gl/gl.h>
#include <stdbool.h>
typedef HGLRC WINAPI wglCreateContextAttribsARB_type(HDC hdc, HGLRC hShareContext, const int *attribList);
wglCreateContextAttribsARB_type *wglCreateContextAttribsARB;
// See https://www.opengl.org/registry/specs/ARB/wgl_create_context.txt for all values
#define WGL_CONTEXT_MAJOR_VERSION_ARB 0x2091
#define WGL_CONTEXT_MINOR_VERSION_ARB 0x2092
#define WGL_CONTEXT_PROFILE_MASK_ARB 0x9126
#define WGL_CONTEXT_CORE_PROFILE_BIT_ARB 0x00000001
typedef BOOL WINAPI wglChoosePixelFormatARB_type(HDC hdc, const int *piAttribIList, const FLOAT *pfAttribFList, UINT nMaxFormats, int *piFormats, UINT *nNumFormats);
wglChoosePixelFormatARB_type *wglChoosePixelFormatARB;
// See https://www.opengl.org/registry/specs/ARB/wgl_pixel_format.txt for all values
#define WGL_DRAW_TO_WINDOW_ARB 0x2001
#define WGL_ACCELERATION_ARB 0x2003
#define WGL_SUPPORT_OPENGL_ARB 0x2010
#define WGL_DOUBLE_BUFFER_ARB 0x2011
#define WGL_PIXEL_TYPE_ARB 0x2013
#define WGL_COLOR_BITS_ARB 0x2014
#define WGL_DEPTH_BITS_ARB 0x2022
#define WGL_STENCIL_BITS_ARB 0x2023
#define WGL_FULL_ACCELERATION_ARB 0x2027
#define WGL_TYPE_RGBA_ARB 0x202B
static void
fatal_error(char *msg)
{
MessageBoxA(NULL, msg, "Error", MB_OK | MB_ICONEXCLAMATION);
exit(EXIT_FAILURE);
}
static void
init_opengl_extensions(void)
{
// Before we can load extensions, we need a dummy OpenGL context, created using a dummy window.
// We use a dummy window because you can only set the pixel format for a window once. For the
// real window, we want to use wglChoosePixelFormatARB (so we can potentially specify options
// that aren't available in PIXELFORMATDESCRIPTOR), but we can't load and use that before we
// have a context.
WNDCLASSA window_class = {
.style = CS_HREDRAW | CS_VREDRAW | CS_OWNDC,
.lpfnWndProc = DefWindowProcA,
.hInstance = GetModuleHandle(0),
.lpszClassName = "Dummy_WGL_djuasiodwa",
};
if (!RegisterClassA(&window_class)) {
fatal_error("Failed to register dummy OpenGL window.");
}
HWND dummy_window = CreateWindowExA(
0,
window_class.lpszClassName,
"Dummy OpenGL Window",
0,
CW_USEDEFAULT,
CW_USEDEFAULT,
CW_USEDEFAULT,
CW_USEDEFAULT,
0,
0,
window_class.hInstance,
0);
if (!dummy_window) {
fatal_error("Failed to create dummy OpenGL window.");
}
HDC dummy_dc = GetDC(dummy_window);
PIXELFORMATDESCRIPTOR pfd = {
.nSize = sizeof(pfd),
.nVersion = 1,
.iPixelType = PFD_TYPE_RGBA,
.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
.cColorBits = 32,
.cAlphaBits = 8,
.iLayerType = PFD_MAIN_PLANE,
.cDepthBits = 24,
.cStencilBits = 8,
};
int pixel_format = ChoosePixelFormat(dummy_dc, &pfd);
if (!pixel_format) {
fatal_error("Failed to find a suitable pixel format.");
}
if (!SetPixelFormat(dummy_dc, pixel_format, &pfd)) {
fatal_error("Failed to set the pixel format.");
}
HGLRC dummy_context = wglCreateContext(dummy_dc);
if (!dummy_context) {
fatal_error("Failed to create a dummy OpenGL rendering context.");
}
if (!wglMakeCurrent(dummy_dc, dummy_context)) {
fatal_error("Failed to activate dummy OpenGL rendering context.");
}
wglCreateContextAttribsARB = (wglCreateContextAttribsARB_type*)wglGetProcAddress("wglCreateContextAttribsARB");
wglChoosePixelFormatARB = (wglChoosePixelFormatARB_type*)wglGetProcAddress("wglChoosePixelFormatARB");
wglMakeCurrent(dummy_dc, 0);
wglDeleteContext(dummy_context);
ReleaseDC(dummy_window, dummy_dc);
DestroyWindow(dummy_window);
}
static HGLRC
init_opengl(HDC real_dc)
{
init_opengl_extensions();
// Now we can choose a pixel format the modern way, using wglChoosePixelFormatARB.
int pixel_format_attribs[] = {
WGL_DRAW_TO_WINDOW_ARB, GL_TRUE,
WGL_SUPPORT_OPENGL_ARB, GL_TRUE,
WGL_DOUBLE_BUFFER_ARB, GL_TRUE,
WGL_ACCELERATION_ARB, WGL_FULL_ACCELERATION_ARB,
WGL_PIXEL_TYPE_ARB, WGL_TYPE_RGBA_ARB,
WGL_COLOR_BITS_ARB, 32,
WGL_DEPTH_BITS_ARB, 24,
WGL_STENCIL_BITS_ARB, 8,
0
};
int pixel_format;
UINT num_formats;
wglChoosePixelFormatARB(real_dc, pixel_format_attribs, 0, 1, &pixel_format, &num_formats);
if (!num_formats) {
fatal_error("Failed to set the OpenGL 3.3 pixel format.");
}
PIXELFORMATDESCRIPTOR pfd;
DescribePixelFormat(real_dc, pixel_format, sizeof(pfd), &pfd);
if (!SetPixelFormat(real_dc, pixel_format, &pfd)) {
fatal_error("Failed to set the OpenGL 3.3 pixel format.");
}
// Specify that we want to create an OpenGL 3.3 core profile context
int gl33_attribs[] = {
WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
WGL_CONTEXT_MINOR_VERSION_ARB, 3,
WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_CORE_PROFILE_BIT_ARB,
0,
};
HGLRC gl33_context = wglCreateContextAttribsARB(real_dc, 0, gl33_attribs);
if (!gl33_context) {
fatal_error("Failed to create OpenGL 3.3 context.");
}
if (!wglMakeCurrent(real_dc, gl33_context)) {
fatal_error("Failed to activate OpenGL 3.3 rendering context.");
}
return gl33_context;
}
static LRESULT CALLBACK
window_callback(HWND window, UINT msg, WPARAM wparam, LPARAM lparam)
{
LRESULT result = 0;
switch (msg) {
case WM_CLOSE:
case WM_DESTROY:
PostQuitMessage(0);
break;
default:
result = DefWindowProcA(window, msg, wparam, lparam);
break;
}
return result;
}
static HWND
create_window(HINSTANCE inst)
{
WNDCLASSA window_class = {
.style = CS_HREDRAW | CS_VREDRAW | CS_OWNDC,
.lpfnWndProc = window_callback,
.hInstance = inst,
.hCursor = LoadCursor(0, IDC_ARROW),
.hbrBackground = 0,
.lpszClassName = "WGL_fdjhsklf",
};
if (!RegisterClassA(&window_class)) {
fatal_error("Failed to register window.");
}
// Specify a desired width and height, then adjust the rect so the window's client area will be
// that size.
RECT rect = {
.right = 1024,
.bottom = 576,
};
DWORD window_style = WS_OVERLAPPEDWINDOW;
AdjustWindowRect(&rect, window_style, false);
HWND window = CreateWindowExA(
0,
window_class.lpszClassName,
"OpenGL",
window_style,
CW_USEDEFAULT,
CW_USEDEFAULT,
rect.right - rect.left,
rect.bottom - rect.top,
0,
0,
inst,
0);
if (!window) {
fatal_error("Failed to create window.");
}
return window;
}
int WINAPI
WinMain(HINSTANCE inst, HINSTANCE prev, LPSTR cmd_line, int show)
{
HWND window = create_window(inst);
HDC gldc = GetDC(window);
HGLRC glrc = init_opengl(gldc);
ShowWindow(window, show);
UpdateWindow(window);
bool running = true;
while (running) {
MSG msg;
while (PeekMessageA(&msg, 0, 0, 0, PM_REMOVE)) {
if (msg.message == WM_QUIT) {
running = false;
} else {
TranslateMessage(&msg);
DispatchMessageA(&msg);
}
}
glClearColor(1.0f, 0.5f, 0.5f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Do OpenGL rendering here
SwapBuffers(gldc);
}
return 0;
}
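If you take the glad route mentioned above, the loader call goes right after wglMakeCurrent has succeeded. A minimal sketch, assuming a glad header generated for OpenGL 3.3 core is on the include path:
#include <glad/glad.h> // generated loader, assumed setup

// ...after wglMakeCurrent(real_dc, gl33_context) has succeeded:
if (!gladLoadGL()) {
    fatal_error("Failed to load OpenGL functions via glad.");
}
// Post-1.1 entry points such as glCreateShader are now callable:
GLuint vs = glCreateShader(GL_VERTEX_SHADER);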

wglCreateContextAttribsARB crashes in debug mode under the x64 platform

I have encountered an issue trying to debug my 64-bit application under Visual Studio 2015. When I switch it to debug mode, it crashes during window creation, which did not happen in 32-bit mode.
Here's the redacted window initialization code:
int indexPF;
WNDCLASS WinClass;
WinClass.style = CS_OWNDC | CS_PARENTDC;
WinClass.lpfnWndProc = WndProc;
WinClass.cbClsExtra = 0;
WinClass.cbWndExtra = 0;
WinClass.hInstance = hInstance;
WinClass.hIcon = LoadIcon(NULL, IDC_ICON);
WinClass.hCursor = LoadCursor(NULL, IDC_ARROW);
WinClass.hbrBackground = (HBRUSH)GetStockObject(5); // 5 == NULL_BRUSH
WinClass.lpszMenuName = NULL;
WinClass.lpszClassName = "N2";
if (!RegisterClass(&WinClass))
{
...report error and terminate...
}
Logger::Inst() << " ~RegisterClass;" << endl;
//CREATE WINDOW
if (fullScreen)
{
if ((hwnd = CreateWindowEx(WS_EX_LEFT, "N2", "N2",
WS_POPUP,
0, 0, width, height,
NULL, NULL, hInstance, NULL)) == 0)
{
...report error and terminate...
}
}
else
{
if ((hwnd = CreateWindowEx(WS_EX_LEFT, "N2", "N2",
WS_OVERLAPPEDWINDOW,
0, 0, width, height,
NULL, NULL, hInstance, NULL)) == 0)
{
...report error and terminate...
}
}
Logger::Inst() << " ~CreateWindow;" << endl;
//PFD SETUP
PIXELFORMATDESCRIPTOR pfd =
{
sizeof(PIXELFORMATDESCRIPTOR),
1,
PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
PFD_TYPE_RGBA,
32,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
24,
8, 0, PFD_MAIN_PLANE, 0, 0, 0x00FF00FF, 0
};
//HDC
if ((hdc = GetDC(hwnd)) == NULL)
{
...report error and terminate...
}
Logger::Inst() << " ~GotHDC;" << endl;
//SET PIXEL FORMAT
indexPF = ChoosePixelFormat(hdc, &pfd);
if (!indexPF)
{
...report error and terminate...
}
if (!SetPixelFormat(hdc, indexPF, &pfd))
{
pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER | PFD_SWAP_EXCHANGE;
indexPF = ChoosePixelFormat(hdc, &pfd);
if (!SetPixelFormat(hdc, indexPF, &pfd))
{
...report error and terminate...
}
}
Logger::Inst() << " ~SetPFD;" << endl;
//TEMP CONTEXT TO ACQUIRE POINTER
HGLRC tempContext = wglCreateContext(hdc);
if (!tempContext) {
...report error and terminate...
}
if (!wglMakeCurrent(hdc, tempContext)) {
...report error and terminate...
}
int major, minor; glGetIntegerv(GL_MAJOR_VERSION, &major); glGetIntegerv(GL_MINOR_VERSION, &minor);
if (major < 4 || minor < 1) {
...report error and terminate...
}
const int attribs[] =
{
WGL_CONTEXT_MAJOR_VERSION_ARB, major,
WGL_CONTEXT_MINOR_VERSION_ARB, minor,
WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_CORE_PROFILE_BIT_ARB,
0, 0
};
PFNWGLCREATEBUFFERREGIONARBPROC wglCreateContextAttribsARB = (PFNWGLCREATEBUFFERREGIONARBPROC)wglGetProcAddress("wglCreateContextAttribsARB");
if (!wglCreateContextAttribsARB) {
...report error and terminate...
}
**!!! CRASH HERE !!!**
if (!(hglrc = (HGLRC)wglCreateContextAttribsARB(hdc, 0, (UINT)attribs))) {
...report error and terminate...
}
The debugger shows it happens exactly at wglCreateContextAttribsARB (nvoglv64.dll!0000000074ccbdfa). That is a complete mystery to me. My only clue is that after switching to x64 it requires the attributes passed as "UINT" instead of "const int*"; I don't get that part, and I'm not even sure how the conversion between the two is supposed to work. Seems fishy. Ideas?
You've got a typo: the type for wglCreateContextAttribsARB is wrong. It should be PFNWGLCREATECONTEXTATTRIBSARBPROC.
The reason it worked when you were targeting 32-bit is that the signatures of the two are essentially the same in 32-bit:
wglCreateContextAttribsARB:
typedef HGLRC (WINAPI * PFNWGLCREATECONTEXTATTRIBSARBPROC) (HDC hDC, HGLRC hShareContext, const int *attribList);
wglCreateBufferRegionARB:
typedef HANDLE (WINAPI * PFNWGLCREATEBUFFERREGIONARBPROC) (HDC hDC, int iLayerPlane, UINT uType);
The second and third parameters to wglCreateContextAttribsARB are pointers (an HGLRC is a handle, which is just a pointer) and the second and third parameters to wglCreateBufferRegionARB are integers. In 32-bit, pointers are 32-bit in size, so the signatures are essentially the same.
In 64-bit, pointers are 64-bit in size, but integers are still 32-bit (assuming you're using MSVC) so those two pointers you were passing in were being truncated.
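A sketch of the corrected lookup and call (the typedef is the one from wglext.h; note that the attribute list is passed as a plain const int*, with no cast):
// typedef HGLRC (WINAPI *PFNWGLCREATECONTEXTATTRIBSARBPROC)(HDC, HGLRC, const int*);
PFNWGLCREATECONTEXTATTRIBSARBPROC wglCreateContextAttribsARB =
    (PFNWGLCREATECONTEXTATTRIBSARBPROC)wglGetProcAddress("wglCreateContextAttribsARB");
if (!wglCreateContextAttribsARB) {
    // ...report error and terminate...
}
// attribs is the same const int array as above; no (UINT) cast needed or wanted.
if (!(hglrc = wglCreateContextAttribsARB(hdc, 0, attribs))) {
    // ...report error and terminate...
}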

GL_INVALID_ENUM/GL_INVALID_OPERATION after OpenGL 3.1 context creation

I have another, hopefully not THAT stupid, question. I wanted to clean up some old code I updated from OpenGL 1.3 to 3.1, so as a final step I changed the context creation to something like this (my own code below):
https://www.opengl.org/wiki/Tutorial:_OpenGL_3.1_The_First_Triangle_%28C%2B%2B/Win%29
I am using GLEW 1.13.0, and if I only do glewInit in addition to the old context creation, everything works fine. But after switching to a pure 3.1 context:
glHint(GL_PERSPECTIVE_CORRECTION_HINT,GL_NICEST); is giving me GL_INVALID_ENUM
glAlphaFunc( GL_GREATER, 0.5 ); is giving me GL_INVALID_OPERATION
glDisable( GL_ALPHA_TEST ); is giving me GL_INVALID_ENUM
glEnable(GL_NORMALIZE); is giving me GL_INVALID_ENUM
Now, from what I read, none of these is deprecated in OpenGL 3.1 - what did I miss?
It currently looks like this:
hWnd = (HWND)GetWindow();
hDC = GetDC( hWnd );
INT DesiredColorBits = 32;
INT DesiredStencilBits = 8;
INT DesiredDepthBits = 24;
PIXELFORMATDESCRIPTOR pfd;
memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR));
pfd.nSize = sizeof(PIXELFORMATDESCRIPTOR);
pfd.nVersion = 1;
pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = DesiredColorBits;
pfd.cDepthBits = DesiredDepthBits;
pfd.iLayerType = PFD_MAIN_PLANE;
pfd.cStencilBits = DesiredStencilBits;
INT nPixelFormat = ChoosePixelFormat( hDC, &pfd );
printf("Using pixel format %i", nPixelFormat);
if (!SetPixelFormat( hDC, nPixelFormat, &pfd ))
return 0;
hRC = wglCreateContext( hDC );
wglMakeCurrent(hDC, hRC);
//init glew
glewExperimental = GL_TRUE;
GLenum err = glewInit();
if (GLEW_OK != err)
return 0;
INT MajorVersion=3;
INT MinorVersion=1;
if (WGLEW_ARB_create_context && WGLEW_ARB_pixel_format)
{
memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR));
pfd.nSize= sizeof(PIXELFORMATDESCRIPTOR);
pfd.nVersion = 1;
pfd.dwFlags = PFD_DOUBLEBUFFER | PFD_SUPPORT_OPENGL | PFD_DRAW_TO_WINDOW;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = 32;
pfd.cDepthBits = 24;
pfd.iLayerType = PFD_MAIN_PLANE;
const INT iPixelFormatAttribList[] =
{
WGL_DRAW_TO_WINDOW_ARB, GL_TRUE,
WGL_SUPPORT_OPENGL_ARB, GL_TRUE,
WGL_DOUBLE_BUFFER_ARB, GL_TRUE,
WGL_PIXEL_TYPE_ARB, WGL_TYPE_RGBA_ARB,
WGL_COLOR_BITS_ARB, DesiredColorBits,
WGL_DEPTH_BITS_ARB, DesiredDepthBits,
WGL_STENCIL_BITS_ARB, DesiredStencilBits,
0 // End of attributes list
};
INT iContextAttribs[] =
{
WGL_CONTEXT_MAJOR_VERSION_ARB, MajorVersion,
WGL_CONTEXT_MINOR_VERSION_ARB, MinorVersion,
WGL_CONTEXT_FLAGS_ARB, WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB,
0 // End of attributes list
};
INT iPixelFormat, iNumFormats;
wglChoosePixelFormatARB(hDC, iPixelFormatAttribList, NULL, 1, &iPixelFormat, (UINT*)&iNumFormats);
// pfd oldstyle crap...
if (!SetPixelFormat(hDC, iPixelFormat, &pfd))
return 0;
hRC = wglCreateContextAttribsARB(hDC, 0, iContextAttribs);
}
if(hRC)
{
printf("GL_VENDOR : %s", glGetString(GL_VENDOR));
printf("GL_RENDERER : %s", glGetString(GL_RENDERER));
printf("GL_VERSION : %s", glGetString(GL_VERSION));
printf("GLEW Version : %s", glewGetString(GLEW_VERSION));
wglMakeCurrent(hDC, hRC);
}
else
return 0;
I stripped out the code, so sorry if I missed something, but the context is initialized and I am getting:
Using pixel format 9
GL_VENDOR : NVIDIA Corporation
GL_RENDERER : GeForce GT 740M/PCIe/SSE2
GL_VERSION : 4.4.0
GLEW Version : 1.13.0
The first part of the code initializes the old-style context in order to get GLEW initialized, and I am a bit unsure about the PFD part of the 3.1 creation (although many examples show it like this). Nevertheless, I tried a few different variants from different examples and tutorials, and it always resulted in GL_INVALID_OPERATION and GL_INVALID_ENUM when trying to set the states above.
GL_PERSPECTIVE_CORRECTION_HINT isn't in core anymore, and it wasn't in 3.1 either. Refer to the OpenGL wiki for details, but the allowed enums in 3.1 (staying the same up to 4.5) are:
GL_LINE_SMOOTH_HINT
GL_POLYGON_SMOOTH_HINT
GL_TEXTURE_COMPRESSION_HINT
GL_FRAGMENT_SHADER_DERIVATIVE_HINT
The same goes for your other calls: glAlphaFunc, GL_ALPHA_TEST, and GL_NORMALIZE belong to the fixed-function pipeline, which is likewise gone from 3.1 (and the WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB flag in your context attributes rules out the deprecated leftovers as well).
That being said, don't bother with creating 3.1 contexts. If you can, go for 4.4/4.5; if you want to support the previous generation, 3.3 is the reasonable minimum.
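To illustrate, in a 3.1+ core or forward-compatible context the surviving hint calls look like this, and the alpha test moves into the fragment shader:
// Only the four hint targets listed above remain valid in core contexts:
glHint(GL_TEXTURE_COMPRESSION_HINT, GL_NICEST);
glHint(GL_FRAGMENT_SHADER_DERIVATIVE_HINT, GL_NICEST);
// Instead of glAlphaFunc(GL_GREATER, 0.5) + GL_ALPHA_TEST, discard in GLSL:
//     if (color.a <= 0.5) discard;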

Compile an OpenGL program on Windows that was created in Ubuntu

I made a program using OpenGL in Ubuntu. Now I want to produce an .exe file from it. Would it be possible if I use the g++ compiler on Windows and compile it there?
That depends on your code. Usually the rendering context is created in a different way on each platform.
Windows:
hRC_ = wglCreateContextAttribsARB(hDC_, 0, attributes); // Create an OpenGL 3.x context based on the given attributes
if(hRC_ == 0)
{
GLenum error = glGetError();
// ... handle error
}
wglMakeCurrent(nullptr, nullptr); // Remove the temporary context from being active
wglDeleteContext(tempContext); // Delete the temporary OpenGL 2.1 context
wglMakeCurrent(hDC_, hRC_); // Make our OpenGL 3.0 context current
X11/GLX:
const char *glxExts = glXQueryExtensionsString( display_, DefaultScreen( display_ ) );
if(isExtensionSupported( glxExts, "GLX_ARB_create_context" )) // If the OpenGL 3.x context creation extension is available
{
glXCreateContextAttribsARB = (GLXContext(*)(Display* dpy, GLXFBConfig config, GLXContext share_context, Bool direct, const int *attrib_list))glXGetProcAddressARB((GLubyte*)"glXCreateContextAttribsARB");
hRC_ = glXCreateContextAttribsARB(display_, fbConfigs_[0], 0, true, attributes); // Create an OpenGL 3.x context based on the given attributes
if(!glXMakeCurrent(display_, window_, hRC_))
{
throw BlueFramework::BlueCore::Exception("Making context current failed.");
}; // Make our OpenGL 3.0 context current
GLenum err = glewInit(); // Enable GLEW
if (GLEW_OK != err) // If GLEW fails
{
std::cerr << err << std::endl;
std::cerr << "Error: " << glewGetString(err) << std::endl;
throw BlueFramework::BlueCore::Exception("GLEW is not initialized!");
}
}
There are also differences in the attributes handed over to XXXCreateContextAttribs:
Windows:
WGL_CONTEXT_MAJOR_VERSION_ARB, majorVersion_, // Set the MAJOR version of OpenGL
WGL_CONTEXT_MINOR_VERSION_ARB, minorVersion_, // Set the MINOR version of OpenGL
WGL_CONTEXT_FLAGS_ARB,
WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB, // Set our OpenGL context to be forward compatible
WGL_CONTEXT_PROFILE_MASK_ARB,
WGL_CONTEXT_CORE_PROFILE_BIT_ARB,
...
X11/GLX:
GLX_CONTEXT_MAJOR_VERSION_ARB, majorVersion_, // Set the MAJOR version of OpenGL
GLX_CONTEXT_MINOR_VERSION_ARB, minorVersion_, // Set the MINOR version of OpenGL
GLX_CONTEXT_FLAGS_ARB,
GLX_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB // Set our OpenGL context to be forward compatible
//#ifdef _DEBUG
| GLX_CONTEXT_DEBUG_BIT_ARB
//#endif
,
GLX_CONTEXT_PROFILE_MASK_ARB,
GLX_CONTEXT_CORE_PROFILE_BIT_ARB,
0
...
After context creation, the OpenGL-dependent code is almost the same. To create a render context on Windows you need to create a window handle.
It can look something like this:
// Register class
WNDCLASSEX wcex;
wcex.cbSize = sizeof( WNDCLASSEX );
wcex.style = CS_HREDRAW | CS_VREDRAW | CS_OWNDC;
wcex.lpfnWndProc = (WNDPROC)RenderWindow::controlEventsProcessor;//WndProc;
wcex.cbClsExtra = 0;
wcex.cbWndExtra = 0;
wcex.hInstance = hInstance;
wcex.hIcon = nullptr;//LoadIcon( hInstance, ( LPCTSTR )IDI_TUTORIAL1 );
wcex.hCursor = LoadCursor( NULL, IDC_ARROW );
if(fullscreen_)
{
wcex.hbrBackground = ( HBRUSH )( COLOR_WINDOW + 1 );
}
else
{
wcex.hbrBackground = ( HBRUSH )( COLOR_WINDOW + 1 );
}
wcex.lpszMenuName = NULL;
wcex.lpszClassName = wstrWindowClassName_.c_str(); //ss.str().c_str();
wcex.hIconSm = nullptr;//LoadIcon( wcex.hInstance, ( LPCTSTR )IDI_TUTORIAL1 );
if( !RegisterClassEx( &wcex ) )
{
BLUE_LOG_STREAM_EX(
"BlueFramework::BlueGraphicsEngine::RenderWindow::initWindow",
buw::eLogSeverityLevel::Error)
<< "RegisterClassEx failed";
return E_FAIL;
}
// Create window
hInstance = hInstance;
RECT rc = { 0, 0, width, height };
if(!AdjustWindowRect( &rc, WS_OVERLAPPEDWINDOW, FALSE ))
{
BLUE_LOG_STREAM_EX(
"BlueFramework::BlueGraphicsEngine::RenderWindow::initWindow",
buw::eLogSeverityLevel::Error)
<< "AdjustWindowRect failed";
}
if(fullscreen_)
{
hWnd_ = CreateWindowEx(0,
wstrWindowClassName_.c_str(),//L"BlueRenderWindow",//ss.str().c_str(),
L"Render Window",
WS_EX_TOPMOST | WS_POPUP, // fullscreen values
0, 0, // the starting x and y positions should be 0
rc.right - rc.left,
rc.bottom - rc.top,
nullptr,
nullptr,
hInstance,
nullptr);
}
else
{
hWnd_ = CreateWindowEx(0,
wstrWindowClassName_.c_str(),//L"BlueRenderWindow",//ss.str().c_str(),
L"Render Window",
WS_OVERLAPPEDWINDOW,
CW_USEDEFAULT,
CW_USEDEFAULT,
rc.right - rc.left,
rc.bottom - rc.top,
nullptr,
nullptr,
hInstance,
nullptr);
}
The counter part in X11/GLX can look something like this:
int attribs[] = {
GLX_RENDER_TYPE, GLX_RGBA_BIT,
GLX_X_RENDERABLE, True,
GLX_DRAWABLE_TYPE, GLX_WINDOW_BIT,
GLX_X_VISUAL_TYPE, GLX_TRUE_COLOR,
GLX_DOUBLEBUFFER, True,
GLX_RED_SIZE, 8,
GLX_BLUE_SIZE, 8,
GLX_GREEN_SIZE, 8,
GLX_ALPHA_SIZE, 8,
GLX_DEPTH_SIZE, 24,
GLX_STENCIL_SIZE, 8,
0L //= None
};
display_ = XOpenDisplay(nullptr);
if(display_ == nullptr)
{
BLUE_LOG_STREAM_EX(
"BlueFramework::BlueGraphicsEngine::RenderWindow::initWindow",
buw::eLogSeverityLevel::Error)
<< "Cannot connect to X-Server";
return false;
}
int screenNr = XDefaultScreen (display_);
int numConfigs = 0;
// glXCreateContextAttribsARB = (GLXContext(*)(Display* dpy, GLXFBConfig config, GLXContext share_context, Bool direct, const int *attrib_list))glXGetProcAddressARB((GLubyte*)"glXCreateContextAttribsARB");
glXChooseFBConfig = (GLXFBConfig*(*)(Display *dpy, int screen, const int *attrib_list, int *nelements))glXGetProcAddressARB((GLubyte*)"glXChooseFBConfig");
glXGetVisualFromFBConfig = (XVisualInfo*(*)(Display *dpy, GLXFBConfig config))glXGetProcAddressARB((GLubyte*)"glXGetVisualFromFBConfig");
fbConfigs_ = glXChooseFBConfig(display_, XDefaultScreen(display_), attribs, &numConfigs);
if(fbConfigs_ == nullptr)
{
BLUE_LOG_STREAM_EX(
"BlueFramework::BlueGraphicsEngine::RenderWindow::initWindow",
buw::eLogSeverityLevel::Error)
<< "Failed to get framebuffer config";
return false;
}
XVisualInfo* vi = glXGetVisualFromFBConfig(display_, fbConfigs_[0]);
if(vi == nullptr)
{
BLUE_LOG_STREAM_EX(
"BlueFramework::BlueGraphicsEngine::RenderWindow::initWindow",
buw::eLogSeverityLevel::Error)
<< "No appropriate visual found (X-Server)";
return false;
}
bool doubleBuffered = true;
Colormap cmap = XCreateColormap (display_, RootWindow (display_, vi->screen),
vi->visual, AllocNone);
XSetWindowAttributes attributes;
attributes.background_pixmap = 0L;
attributes.colormap = cmap;
attributes.border_pixel = 0;
The easy way would be with Cygwin and freeglut, plus, perhaps, GLEW.
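For instance, a freeglut skeleton like this sketch compiles unchanged on both systems, since the library hides the WGL/GLX differences (on Ubuntu link with -lglut -lGL, on Windows against freeglut and -lopengl32; the exact link lines depend on your setup):
// Minimal portable freeglut program.
#include <GL/glut.h>

static void display(void)
{
    glClear(GL_COLOR_BUFFER_BIT);
    // ...draw here...
    glutSwapBuffers();
}

int main(int argc, char **argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
    glutInitWindowSize(800, 600);
    glutCreateWindow("Portable OpenGL window");
    glutDisplayFunc(display);
    glutMainLoop();
    return 0;
}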

OpenGL render context creation fails

I'm writing an OpenGL 3 program in C++, and now I have an issue on my laptop, a Lenovo ThinkPad E320 (Intel HD Graphics 3000). It works fine on my PC (ATI Radeon HD 5870).
The code near the error is the following:
bool GLWindowCreate(const char *title, int width, int height, bool fullScreen){
...
g_hRC = wglCreateContextAttribsARB(g_hDC, 0, attribs);
if (!g_hRC || !wglMakeCurrent(g_hDC, g_hRC))
{
LOG_ERROR("Creating render context fail (%d)\n", GetLastError());
return false;
}
...
}
Everything compiles fine, and I saw this error in the log file.
I'm using Windows 8 (both on the PC and the laptop). The graphics card in the laptop supports OpenGL 3. I have found an answer about the same problem saying that I need to turn off hardware acceleration, but it seems there is no way to do this in Windows 8.
Added:
Whole window create function:
bool GLWindowCreate(const char *title, int width, int height, bool fullScreen)
{
ASSERT(title);
ASSERT(width > 0);
ASSERT(height > 0);
WNDCLASSEX wcx;
PIXELFORMATDESCRIPTOR pfd;
RECT rect;
HGLRC hRCTemp;
DWORD style, exStyle;
int x, y, format;
memset(&g_window, 0, sizeof(g_window));
memset(&g_input, 0, sizeof(g_input));
PFNWGLCREATECONTEXTATTRIBSARBPROC wglCreateContextAttribsARB = NULL;
// attributes for OpenGL context
int attribs[] =
{
WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
WGL_CONTEXT_MINOR_VERSION_ARB, 3,
WGL_CONTEXT_FLAGS_ARB, WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB,
WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_CORE_PROFILE_BIT_ARB,
0
};
// timer init
QueryPerformanceFrequency(&g_qpc);
ASSERT(g_qpc.QuadPart > 0);
g_timerFrequency = 1.0 / g_qpc.QuadPart;
g_hInstance = (HINSTANCE)GetModuleHandle(NULL);
memset(&wcx, 0, sizeof(wcx));
wcx.cbSize = sizeof(wcx);
wcx.style = CS_HREDRAW | CS_VREDRAW | CS_OWNDC;
wcx.lpfnWndProc = (WNDPROC)GLWindowProc;
wcx.hInstance = g_hInstance;
wcx.lpszClassName = GLWINDOW_CLASS_NAME;
wcx.hIcon = LoadIcon(NULL, IDI_APPLICATION);
wcx.hCursor = LoadCursor(NULL, IDC_ARROW);
if (!RegisterClassEx(&wcx))
{
LOG_ERROR("RegisterClassEx fail (%d)\n", GetLastError());
return false;
}
style = WS_CAPTION | WS_SYSMENU | WS_MINIMIZEBOX;
exStyle = WS_EX_APPWINDOW;
x = (GetSystemMetrics(SM_CXSCREEN) - width) / 2;
y = (GetSystemMetrics(SM_CYSCREEN) - height) / 2;
rect.left = x;
rect.right = x + width;
rect.top = y;
rect.bottom = y + height;
AdjustWindowRectEx (&rect, style, FALSE, exStyle);
// creating window
g_hWnd = CreateWindowEx(exStyle, GLWINDOW_CLASS_NAME, title, style, rect.left, rect.top,
rect.right - rect.left, rect.bottom - rect.top, NULL, NULL, g_hInstance, NULL);
if (!g_hWnd)
{
LOG_ERROR("CreateWindowEx fail (%d)\n", GetLastError());
return false;
}
// get window descriptor
g_hDC = GetDC(g_hWnd);
if (!g_hDC)
{
LOG_ERROR("GetDC fail (%d)\n", GetLastError());
return false;
}
memset(&pfd, 0, sizeof(pfd));
pfd.nSize = sizeof(pfd);
pfd.nVersion = 1;
pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = 32;
pfd.cDepthBits = 24;
// get pixel format
format = ChoosePixelFormat(g_hDC, &pfd);
if (!format || !SetPixelFormat(g_hDC, format, &pfd))
{
LOG_ERROR("Setting pixel format fail (%d)\n", GetLastError());
return false;
}
// creating temp context
// to get wglCreateContextAttribsARB function
hRCTemp = wglCreateContext(g_hDC);
if (!hRCTemp || !wglMakeCurrent(g_hDC, hRCTemp))
{
LOG_ERROR("Creating temp render context fail (%d)\n", GetLastError());
return false;
}
OPENGL_GET_PROC(PFNWGLCREATECONTEXTATTRIBSARBPROC, wglCreateContextAttribsARB);
// delete temp context
wglMakeCurrent(NULL, NULL);
wglDeleteContext(hRCTemp);
// creating OpenGL 3 context
g_hRC = wglCreateContextAttribsARB(g_hDC, 0, attribs);
if (!g_hRC || !wglMakeCurrent(g_hDC, g_hRC))
{
LOG_ERROR("Creating render context fail (%d)\n", GetLastError());
return false;
}
int major, minor;
glGetIntegerv(GL_MAJOR_VERSION, &major);
glGetIntegerv(GL_MINOR_VERSION, &minor);
LOG_DEBUG("OpenGL render context information:\n"
" Renderer : %s\n"
" Vendor : %s\n"
" Version : %s\n"
" GLSL version : %s\n"
" OpenGL version : %d.%d\n",
(const char*)glGetString(GL_RENDERER),
(const char*)glGetString(GL_VENDOR),
(const char*)glGetString(GL_VERSION),
(const char*)glGetString(GL_SHADING_LANGUAGE_VERSION),
major, minor
);
if (!OpenGLInitExtensions())
return false;
GLWindowSetSize(width, height, fullScreen);
return true;
}
I have accidentally found the solution. The problem was here:
int attribs[] =
{
WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
WGL_CONTEXT_MINOR_VERSION_ARB, 3,
WGL_CONTEXT_FLAGS_ARB, WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB,
WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_CORE_PROFILE_BIT_ARB,
0
};
Intel HD Graphics 3000 supports only OpenGL 3.1, not 3.3, so I had to change
WGL_CONTEXT_MINOR_VERSION_ARB, 3,
to
WGL_CONTEXT_MINOR_VERSION_ARB, 1,
Thanks everyone, and sorry for the worry. I hope my solution will help someone.
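If you'd rather not hard-code the version per GPU, one option is to ask for the highest version first and walk down until creation succeeds. A sketch, assuming wglCreateContextAttribsARB is already loaded as in the code above (the chosen version list is just an example):
// Try context versions from newest to oldest until one succeeds.
static const int versions[][2] = { {4, 5}, {4, 0}, {3, 3}, {3, 1} };
HGLRC hRC = NULL;
for (int i = 0; i < 4 && hRC == NULL; ++i)
{
    int attribs[] =
    {
        WGL_CONTEXT_MAJOR_VERSION_ARB, versions[i][0],
        WGL_CONTEXT_MINOR_VERSION_ARB, versions[i][1],
        WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_CORE_PROFILE_BIT_ARB,
        0
    };
    hRC = wglCreateContextAttribsARB(g_hDC, 0, attribs);
}
// On the HD Graphics 3000 this loop ends up with the 3.1 context from above
// (the profile mask attribute is ignored for versions below 3.2).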