GL_INVALID_ENUM/GL_INVALID_OPERATION after OpenGL 3.1 context creation - c++

I have another, hopefully not THAT stupid, question: I wanted to clean up some old code that I had updated from OpenGL 1.3 to 3.1, so as a final step I changed the context creation to something like this (my own code below):
https://www.opengl.org/wiki/Tutorial:_OpenGL_3.1_The_First_Triangle_%28C%2B%2B/Win%29
I am using GLEW 1.13.0, and if I only add glewInit to the old context creation, everything works fine. But after switching to a pure 3.1 context:
glHint(GL_PERSPECTIVE_CORRECTION_HINT,GL_NICEST); is giving me GL_INVALID_ENUM
glAlphaFunc( GL_GREATER, 0.5 ); is giving me GL_INVALID_OPERATION
glDisable( GL_ALPHA_TEST ); is giving me GL_INVALID_ENUM
glEnable(GL_NORMALIZE); is giving me GL_INVALID_ENUM
Now, from what I read, none of these is deprecated in OpenGL 3.1 - what did I miss?
It currently looks like this:
hWnd = (HWND)GetWindow();
hDC = GetDC( hWnd );
INT DesiredColorBits = 32;
INT DesiredStencilBits = 8;
INT DesiredDepthBits = 24;
PIXELFORMATDESCRIPTOR pfd;
memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR));
pfd.nSize = sizeof(PIXELFORMATDESCRIPTOR);
pfd.nVersion = 1;
pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = DesiredColorBits;
pfd.cDepthBits = DesiredDepthBits;
pfd.iLayerType = PFD_MAIN_PLANE;
pfd.cStencilBits = DesiredStencilBits;
INT nPixelFormat = ChoosePixelFormat( hDC, &pfd );
printf("Using pixel format %i", nPixelFormat);
if (!SetPixelFormat( hDC, nPixelFormat, &pfd ))
return 0;
hRC = wglCreateContext( hDC );
wglMakeCurrent(hDC, hRC);
//init glew
glewExperimental = GL_TRUE;
GLenum err = glewInit();
if (GLEW_OK != err)
return 0;
INT MajorVersion=3;
INT MinorVersion=1;
if (WGLEW_ARB_create_context && WGLEW_ARB_pixel_format)
{
memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR));
pfd.nSize= sizeof(PIXELFORMATDESCRIPTOR);
pfd.nVersion = 1;
pfd.dwFlags = PFD_DOUBLEBUFFER | PFD_SUPPORT_OPENGL | PFD_DRAW_TO_WINDOW;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = 32;
pfd.cDepthBits = 24;
pfd.iLayerType = PFD_MAIN_PLANE;
const INT iPixelFormatAttribList[] =
{
WGL_DRAW_TO_WINDOW_ARB, GL_TRUE,
WGL_SUPPORT_OPENGL_ARB, GL_TRUE,
WGL_DOUBLE_BUFFER_ARB, GL_TRUE,
WGL_PIXEL_TYPE_ARB, WGL_TYPE_RGBA_ARB,
WGL_COLOR_BITS_ARB, DesiredColorBits,
WGL_DEPTH_BITS_ARB, DesiredDepthBits,
WGL_STENCIL_BITS_ARB, DesiredStencilBits,
0 // End of attributes list
};
INT iContextAttribs[] =
{
WGL_CONTEXT_MAJOR_VERSION_ARB, MajorVersion,
WGL_CONTEXT_MINOR_VERSION_ARB, MinorVersion,
WGL_CONTEXT_FLAGS_ARB, WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB,
0 // End of attributes list
};
INT iPixelFormat;
UINT iNumFormats;
wglChoosePixelFormatARB(hDC, iPixelFormatAttribList, NULL, 1, &iPixelFormat, &iNumFormats);
// pfd oldstyle crap...
if (!SetPixelFormat(hDC, iPixelFormat, &pfd))
return 0;
hRC = wglCreateContextAttribsARB(hDC, 0, iContextAttribs);
}
if(hRC)
{
printf("GL_VENDOR : %s", glGetString(GL_VENDOR));
printf("GL_RENDERER : %s", glGetString(GL_RENDERER));
printf("GL_VERSION : %s", glGetString(GL_VERSION));
printf("GLEW Version : %s", glewGetString(GLEW_VERSION));
wglMakeCurrent(hDC, hRC);
}
else
return 0;
I stripped the code down, so sorry if I missed something; the context is initialized and I am getting:
Using pixel format 9
GL_VENDOR : NVIDIA Corporation
GL_RENDERER : GeForce GT 740M/PCIe/SSE2
GL_VERSION : 4.4.0
GLEW Version : 1.13.0
The first part of the code initializes an old-style context in order to get GLEW initialized. I am a bit unsure about the PFD part in the 3.1 creation (although many examples show it like this); nevertheless, I tried a few different variants from different examples and tutorials, and it always resulted in GL_INVALID_OPERATION and GL_INVALID_ENUM when trying to set the states above.

GL_PERSPECTIVE_CORRECTION_HINT isn't in core anymore, nor was it in 3.1. Refer to the OpenGL wiki for details, but the allowed enums in 3.1 (unchanged up to 4.5) are:
GL_LINE_SMOOTH_HINT
GL_POLYGON_SMOOTH_HINT
GL_TEXTURE_COMPRESSION_HINT
GL_FRAGMENT_SHADER_DERIVATIVE_HINT
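The other calls in your list fail for the same underlying reason: the alpha test (glAlphaFunc / GL_ALPHA_TEST) and GL_NORMALIZE are part of the fixed-function state that was removed from core contexts as well. As a rough sketch (illustrative names, not your code), the alpha test maps to a discard in the fragment shader; GLSL 1.40 is the version that ships with OpenGL 3.1:
// Hedged sketch: glAlphaFunc(GL_GREATER, 0.5) becomes a shader-side discard.
const char* FragmentSrc = R"GLSL(
#version 140
uniform sampler2D Tex;
in vec2 UV;
out vec4 FragColor;
void main()
{
    vec4 c = texture(Tex, UV);
    if (c.a <= 0.5)   // was: glAlphaFunc(GL_GREATER, 0.5)
        discard;
    FragColor = c;
}
)GLSL";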
That being said, don't bother creating 3.1 contexts. If you can, go for 4.4/4.5; if you want to support the previous hardware generation, 3.3 is the reasonable minimum.

Related

Partially transparent window OpenGL/Win32

I have read every existing question on this tricky (niche) subject, but I am stuck. I have a Win32 window with an OpenGL context. I want my window to be partially transparent.
My result so far is that the entirety of the window is transparent. I want only the black area to be transparent, so that I can draw some 3D objects and they will look like they are coming out of the window.
First, in my window class, I have set hbrBackground to black.
Windows::WindowClass windowClass;
windowClass.cbSize = sizeof(Windows::WindowClass);
windowClass.style = Windows::CS_VREDRAW | Windows::CS_HREDRAW;
windowClass.lpfnWndProc = WndProc;
windowClass.cbClsExtra = 0;
windowClass.cbWndExtra = 0;
windowClass.hInstance = moduleHandle;
windowClass.hIcon = Windows::LoadIcon(0u, (X*)Windows::IDI_QUESTION);
windowClass.hCursor = Windows::LoadCursor(0u, (X*)Windows::IDC_ARROW);
windowClass.hbrBackground = Windows::CreateSolidBrush(0x00000000);
windowClass.lpszMenuName = nullptr;
windowClass.lpszClassName = (X*)name;
windowClass.hIconSm = Windows::LoadIcon(0u, (X*)Windows::IDI_QUESTION);
I have created my window with the WS_EX_LAYERED flag.
windowHandle = Windows::CreateWindow(Windows::WS_EX_LAYERED, (X*)name, "", Windows::WS_POPUP, w / 4, h / 4, w / 2, h / 2, 0u, 0u, moduleHandle, 0u);
In my pixel format, I have enabled alpha and composition.
PixelFormatDescriptor format;
format.nSize = sizeof(PixelFormatDescriptor);
format.nVersion = 1;
format.dwFlags = PFD_SUPPORT_OPENGL | PFD_DRAW_TO_WINDOW | PFD_DOUBLEBUFFER | PFD_SUPPORT_COMPOSITION;
format.iPixelType = PFD_TYPE_RGBA;
format.cColorBits = 32;
format.cRedBits = 0;
format.cRedShift = 0;
format.cGreenBits = 0;
format.cGreenShift = 0;
format.cBlueBits = 0;
format.cBlueShift = 0;
format.cAlphaBits = 8;
format.cAlphaShift = 0;
format.cAccumBits = 0;
format.cAccumRedBits = 0;
format.cAccumGreenBits = 0;
format.cAccumBlueBits = 0;
format.cAccumAlphaBits = 0;
format.cDepthBits = 24;
format.cStencilBits = 8;
format.cAuxBuffers = 0;
format.iLayerType = PFD_MAIN_PLANE;
format.bReserved = 0;
format.dwLayerMask = 0;
format.dwVisibleMask = 0;
format.dwDamageMask = 0;
I have tried the blur region "trick", but it has no effect; the result I am describing does not depend on this piece of code.
struct DwmBlurBehind
{
U4 dwFlags;
S4 fEnable;
X* blurRegionHandle;
S4 fTransitionOnMaximized;
};
DwmBlurBehind blurBehind;
blurBehind.dwFlags = Windows::DWM_BB_ENABLE | Windows::DWM_BB_BLURREGION;
blurBehind.fEnable = true;
blurBehind.blurRegionHandle = Windows::CreateRectRgn(0, 0, -1, -1);
blurBehind.fTransitionOnMaximized = false;
Windows::DwmEnableBlurBehindWindow(windowHandle, &blurBehind);
Finally, I have set the LWA_COLORKEY and LWA_ALPHA attributes. This is what gave me the effect displayed. However, the color key does not seem to be taken into account (I have tried non-zero values as well).
Windows::SetLayeredWindowAttributes(windowHandle, 0, 170, Windows::LWA_COLORKEY | Windows::LWA_ALPHA);
I did not forget to enable blending.
GL::Enable(GL::BLEND);
GL::BlendFunc(GL::SRC_ALPHA, GL::ONE_MINUS_SRC_ALPHA);
What you want to do requires window compositing, which has been around since Windows Vista - i.e. essentially every version of Windows you have to care about (Windows XP and earlier are at end of life).
The key steps are to enable DWM intra-window compositing by enabling "blur behind window" and to use a WS_POPUP window; if you do not use the WS_POPUP style, the window manager will draw decorations, and your OpenGL rendering will "hover" above them.
DWM_BLURBEHIND bb = {0};
bb.dwFlags = DWM_BB_ENABLE;
bb.fEnable = TRUE;
bb.hRgnBlur = NULL;
DwmEnableBlurBehindWindow(hWnd, &bb);
MARGINS margins = {-1};
impl_DwmExtendFrameIntoClientArea(hWnd, &margins);
Next, you must create an OpenGL context using the newer "attribute" API instead of the pixel format descriptor selection. With the attribute API you can select a window format that is transparent and has an alpha channel.
int attribs[] = {
WGL_DRAW_TO_WINDOW_ARB, TRUE,
WGL_DOUBLE_BUFFER_ARB, TRUE,
WGL_SUPPORT_OPENGL_ARB, TRUE,
WGL_PIXEL_TYPE_ARB, WGL_TYPE_RGBA_ARB,
WGL_TRANSPARENT_ARB, TRUE,
WGL_COLOR_BITS_ARB, 32,
WGL_RED_BITS_ARB, 8,
WGL_GREEN_BITS_ARB, 8,
WGL_BLUE_BITS_ARB, 8,
WGL_ALPHA_BITS_ARB, 8,
WGL_DEPTH_BITS_ARB, 24,
WGL_STENCIL_BITS_ARB, 8,
0, 0
};
INT iPF;
UINT num_formats_chosen;
if( !wglChoosePixelFormatARB(
hDC,
attribs,
NULL,
1,
&iPF,
&num_formats_choosen) ) {
fprintf(stderr, "error choosing proper pixel format\n");
return NULL;
}
if( !num_formats_chosen ) {
return NULL;
}
PIXELFORMATDESCRIPTOR pfd;
memset(&pfd, 0, sizeof(pfd));
/* now this is a kludge; we need to pass something in the PIXELFORMATDESCRIPTOR
* to SetPixelFormat; it will be ignored, mostly. OTOH we want to send something
* sane, we're nice people after all - it doesn't hurt if this fails. */
DescribePixelFormat(hDC, iPF, sizeof(pfd), &pfd);
if( !SetPixelFormat(hDC, iPF, &pfd) ) {
fprintf(stderr, "error setting proper pixel format\n");
ReleaseDC(hWnd, hDC);
DestroyWindow(hWnd);
return NULL;
}
I've got a complete working example for this in my wglarb wrapper repository over at GitHub:
https://github.com/datenwolf/wglarb/blob/master/test/layered.c
You might be missing the call to UpdateLayeredWindow. IIRC, not calling UpdateLayeredWindow results in some weird behavior. YMMV.
More details here
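For reference, a minimal sketch of such a call, assuming the window contents have been rendered into a 32-bit premultiplied-alpha DIB selected into hdcMem (hdcMem, width and height are illustrative, not from the question):
// Push the premultiplied-alpha bitmap to the compositor.
BLENDFUNCTION blend = { AC_SRC_OVER, 0, 255, AC_SRC_ALPHA };
POINT src = { 0, 0 };
SIZE size = { width, height };
UpdateLayeredWindow(windowHandle, NULL, NULL, &size,
                    hdcMem, &src, 0, &blend, ULW_ALPHA);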
According to the SetLayeredWindowAttributes documentation, you need to pass
a COLORREF structure that specifies the transparency color key to be
used when composing the layered window. All pixels painted by the
window in this color will be transparent. To generate a COLORREF, use
the RGB macro.
Review the second parameter of your call to this function.
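For example, if the black area is what should be keyed out, the call would look something like this (a sketch; the 170 alpha value is carried over from your code):
// 0 happens to equal RGB(0, 0, 0), but building the COLORREF with the RGB
// macro makes the intent explicit.
SetLayeredWindowAttributes(windowHandle, RGB(0, 0, 0), 170,
                           LWA_COLORKEY | LWA_ALPHA);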

Black screen in OpenGL on some machines (PIXELFORMATDESCRIPTOR/SwapBuffer?)

I've written an OpenGL application which crashes on some machines (on my own test machines it runs: Windows 8, Windows 7, Windows Vista (x86) - but on some client machines with Windows Vista (x86) it crashes). I don't know yet why exactly, but I stripped the application down to an empty skeleton which just does glClear(). That at least runs without a crash (the OpenGL context is created, GLEW loads), but the screen is not cleared with the color specified by glClearColor. I suspect some issue in my PIXELFORMATDESCRIPTOR, or that SwapBuffers doesn't work as expected there.
My code (I left out the window creation and main() for simplicity):
hdc = GetDC(hWnd);
int pf;
PIXELFORMATDESCRIPTOR pfd;
memset(&pfd, 0, sizeof(pfd));
pfd.nSize = sizeof(pfd);
pfd.nVersion = 1;
pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = 32;
pf = ChoosePixelFormat(hdc, &pfd);
if (pf == 0) {
MessageBox(NULL, "ChoosePixelFormat() failed: "
"Cannot find a suitable pixel format.", "Error", MB_OK);
}
if (SetPixelFormat(hdc, pf, &pfd) == FALSE) {
MessageBox(NULL, "SetPixelFormat() failed: "
"Cannot set format specified.", "Error", MB_OK);
}
DescribePixelFormat(hdc, pf, sizeof(PIXELFORMATDESCRIPTOR), &pfd);
hglrc = wglCreateContext(hdc);
if(!wglMakeCurrent(hdc, hglrc)){
MessageBox(NULL, "wglMakeCurrent() failed: "
"Cannot make context current.", "Error", MB_OK);
}
GLenum err = glewInit();
if (GLEW_OK != err){
/* Problem: glewInit failed, something is seriously wrong. */
fprintf(stdout, "Error: %s\n", glewGetErrorString(err));
}
fprintf(stdout, "Status: Using GLEW %s\n", glewGetString(GLEW_VERSION));
This code outputs: "Status: Using GLEW 1.10.0"
My main loop then:
while(true){
timeDuration = std::chrono::duration_cast<std::chrono::duration<double> >(std::chrono::high_resolution_clock::now() - lastTime);
lastTime = std::chrono::high_resolution_clock::now();
time += timeDuration.count();
glClearColor(0.7f, 0.7f, 0.7f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
SwapBuffers(hdc);
while(PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)){
TranslateMessage(&msg);
DispatchMessage(&msg);
}
}
On my own machine I get a gray screen (as expected) - but on the machines where my original program crashed, the screen is just black (none of the MessageBoxes appear, and the output is also: "Status: Using GLEW 1.10.0"). So I can't see any evidence of an error - but the output is different, and glClearColor() seems to be ignored.
Any ideas on how I could hunt down this issue further?
Well, I'm still not sure why exactly it ran on my machine before, but the issue was this line:
pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL;
after changing it to:
pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER;
it ran everywhere. I hadn't specified double buffering in the PIXELFORMATDESCRIPTOR, and that apparently brings strange issues with it (it worked on my machine for some reason - but not on others).
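A defensive check after SetPixelFormat might have caught this earlier - a sketch along these lines (error handling elided):
// Ask Windows what was actually granted; ChoosePixelFormat only returns the
// closest match, which may silently lack requested capabilities.
PIXELFORMATDESCRIPTOR granted;
DescribePixelFormat(hdc, pf, sizeof(granted), &granted);
if (!(granted.dwFlags & PFD_DOUBLEBUFFER))
    fprintf(stderr, "Got a single-buffered format; SwapBuffers() may misbehave.\n");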

Modern OpenGL context failure

OK, I managed to create an OpenGL context with wglCreateContextAttribsARB with version 3.2 in my attrib struct (so I have initialized a 3.2 OpenGL context).
It works, but the strange thing is, when I use glBindBuffer, for example, I still get an unresolved external linker error - shouldn't a newer context prevent this?
I'm on Windows, BTW; Linux doesn't have to deal with older and newer contexts (it directly supports the core of its version).
The code:
PIXELFORMATDESCRIPTOR pfd;
HGLRC tmpRC;
int iFormat;
if (!(hDC = GetDC(hWnd)))
{
CMsgBox("Unable to create a device context. Program will now close.", "Error");
return false;
}
ZeroMemory(&pfd, sizeof(pfd));
pfd.nSize = sizeof(pfd);
pfd.nVersion = 1;
pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = attribs->colorbits;
pfd.cDepthBits = attribs->depthbits;
pfd.iLayerType = PFD_MAIN_PLANE;
if (!(iFormat = ChoosePixelFormat(hDC, &pfd)))
{
CMsgBox("Unable to find a suitable pixel format. Program will now close.", "Error");
return false;
}
if (!SetPixelFormat(hDC, iFormat, &pfd))
{
CMsgBox("Unable to initialize the pixel formats. Program will now close.", "Error");
return false;
}
if (!(tmpRC=wglCreateContext(hDC)))
{
CMsgBox("Unable to create a rendering context. Program will now close.", "Error");
return false;
}
if (!wglMakeCurrent(hDC, tmpRC))
{
CMsgBox("Unable to activate the rendering context. Program will now close.", "Error");
return false;
}
strncpy(vers, (char*)glGetString(GL_VERSION), 3);
vers[3] = '\0';
if (sscanf(vers, "%i.%i", &glv, &glsubv) != 2)
{
CMsgBox("Unable to retrieve the OpenGL version. Program will now close.", "Error");
return false;
}
hRC = NULL;
if (glv > 2) // Have OpenGL 3.+ support
{
if ((wglCreateContextAttribsARB = (PFNWGLCREATECONTEXTATTRIBSARBPROC)wglGetProcAddress("wglCreateContextAttribsARB")))
{
int attribs[] = {WGL_CONTEXT_MAJOR_VERSION_ARB, glv, WGL_CONTEXT_MINOR_VERSION_ARB, glsubv,WGL_CONTEXT_FLAGS_ARB, 0,0};
hRC = wglCreateContextAttribsARB(hDC, 0, attribs);
wglMakeCurrent(NULL, NULL);
wglDeleteContext(tmpRC);
if (!wglMakeCurrent(hDC, hRC))
{
CMsgBox("Unable to activate the rendering context. Program will now close.", "Error");
return false;
}
moderncontext = true;
}
}
if (hRC == NULL)
{
hRC = tmpRC;
moderncontext = false;
}
You will still need to:
1. Declare function pointers with the appropriate names and function signatures.
2. Fetch the correct memory locations for those pointers with wglGetProcAddress.
3. #define the actual OpenGL API names to the corresponding function pointers.
That's right, the OpenGL API functions are actually function pointers.
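A minimal sketch of what that looks like for a single entry point (assuming the PFN typedefs from <GL/glext.h> are available; the pgl prefix is just an illustrative naming choice):
#include <GL/glext.h>   // PFNGLBINDBUFFERPROC and friends

static PFNGLBINDBUFFERPROC pglBindBuffer = NULL;
#define glBindBuffer pglBindBuffer

void LoadGLPointers()
{
    // Only valid while a GL context is current; returns NULL otherwise.
    pglBindBuffer = (PFNGLBINDBUFFERPROC)wglGetProcAddress("glBindBuffer");
}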
If you don't have the time and patience to do this, then it is advisable to use an OpenGL loader library like GL3W or GLEW. This will also save you the burden of first creating a dummy context and then the "real" context.
Also see the OpenGL wiki page on loading function pointers.

OpenGL creating render context fail

I'm writing an OpenGL 3 program in C++, and now I have an issue on my laptop, a Lenovo ThinkPad E320 (Intel HD Graphics 3000). It works fine on my PC (ATI Radeon HD 5870).
The code near the error is the following:
bool GLWindowCreate(const char *title, int width, int height, bool fullScreen){
...
g_hRC = wglCreateContextAttribsARB(g_hDC, 0, attribs);
if (!g_hRC || !wglMakeCurrent(g_hDC, g_hRC))
{
LOG_ERROR("Creating render context fail (%d)\n", GetLastError());
return false;
}
...
}
Everything compiles fine, and I see this error in the log file.
I'm using Windows 8 (both on the PC and the laptop). The graphics card in the laptop supports OpenGL 3. I found an answer about the same problem saying that I need to turn off hardware acceleration, but it seems there is no way to do this in Windows 8.
Added:
Whole window create function:
bool GLWindowCreate(const char *title, int width, int height, bool fullScreen)
{
ASSERT(title);
ASSERT(width > 0);
ASSERT(height > 0);
WNDCLASSEX wcx;
PIXELFORMATDESCRIPTOR pfd;
RECT rect;
HGLRC hRCTemp;
DWORD style, exStyle;
int x, y, format;
memset(&g_window, 0, sizeof(g_window));
memset(&g_input, 0, sizeof(g_input));
PFNWGLCREATECONTEXTATTRIBSARBPROC wglCreateContextAttribsARB = NULL;
// attributes for OpenGL context
int attribs[] =
{
WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
WGL_CONTEXT_MINOR_VERSION_ARB, 3,
WGL_CONTEXT_FLAGS_ARB, WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB,
WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_CORE_PROFILE_BIT_ARB,
0
};
// timer init
QueryPerformanceFrequency(&g_qpc);
ASSERT(g_qpc.QuadPart > 0);
g_timerFrequency = 1.0 / g_qpc.QuadPart;
g_hInstance = (HINSTANCE)GetModuleHandle(NULL);
memset(&wcx, 0, sizeof(wcx));
wcx.cbSize = sizeof(wcx);
wcx.style = CS_HREDRAW | CS_VREDRAW | CS_OWNDC;
wcx.lpfnWndProc = (WNDPROC)GLWindowProc;
wcx.hInstance = g_hInstance;
wcx.lpszClassName = GLWINDOW_CLASS_NAME;
wcx.hIcon = LoadIcon(NULL, IDI_APPLICATION);
wcx.hCursor = LoadCursor(NULL, IDC_ARROW);
if (!RegisterClassEx(&wcx))
{
LOG_ERROR("RegisterClassEx fail (%d)\n", GetLastError());
return false;
}
style = WS_CAPTION | WS_SYSMENU | WS_MINIMIZEBOX;
exStyle = WS_EX_APPWINDOW;
x = (GetSystemMetrics(SM_CXSCREEN) - width) / 2;
y = (GetSystemMetrics(SM_CYSCREEN) - height) / 2;
rect.left = x;
rect.right = x + width;
rect.top = y;
rect.bottom = y + height;
AdjustWindowRectEx (&rect, style, FALSE, exStyle);
// creating window
g_hWnd = CreateWindowEx(exStyle, GLWINDOW_CLASS_NAME, title, style, rect.left, rect.top,
rect.right - rect.left, rect.bottom - rect.top, NULL, NULL, g_hInstance, NULL);
if (!g_hWnd)
{
LOG_ERROR("CreateWindowEx fail (%d)\n", GetLastError());
return false;
}
// get window descriptor
g_hDC = GetDC(g_hWnd);
if (!g_hDC)
{
LOG_ERROR("GetDC fail (%d)\n", GetLastError());
return false;
}
memset(&pfd, 0, sizeof(pfd));
pfd.nSize = sizeof(pfd);
pfd.nVersion = 1;
pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = 32;
pfd.cDepthBits = 24;
// get pixel format
format = ChoosePixelFormat(g_hDC, &pfd);
if (!format || !SetPixelFormat(g_hDC, format, &pfd))
{
LOG_ERROR("Setting pixel format fail (%d)\n", GetLastError());
return false;
}
// creating temp context
// to get wglCreateContextAttribsARB function
hRCTemp = wglCreateContext(g_hDC);
if (!hRCTemp || !wglMakeCurrent(g_hDC, hRCTemp))
{
LOG_ERROR("Сreating temp render context fail (%d)\n", GetLastError());
return false;
}
OPENGL_GET_PROC(PFNWGLCREATECONTEXTATTRIBSARBPROC, wglCreateContextAttribsARB);
// delete temp context
wglMakeCurrent(NULL, NULL);
wglDeleteContext(hRCTemp);
// creating OpenGL 3 context
g_hRC = wglCreateContextAttribsARB(g_hDC, 0, attribs);
if (!g_hRC || !wglMakeCurrent(g_hDC, g_hRC))
{
LOG_ERROR("Creating render context fail (%d)\n", GetLastError());
return false;
}
int major, minor;
glGetIntegerv(GL_MAJOR_VERSION, &major);
glGetIntegerv(GL_MINOR_VERSION, &minor);
LOG_DEBUG("OpenGL render context information:\n"
" Renderer : %s\n"
" Vendor : %s\n"
" Version : %s\n"
" GLSL version : %s\n"
" OpenGL version : %d.%d\n",
(const char*)glGetString(GL_RENDERER),
(const char*)glGetString(GL_VENDOR),
(const char*)glGetString(GL_VERSION),
(const char*)glGetString(GL_SHADING_LANGUAGE_VERSION),
major, minor
);
if (!OpenGLInitExtensions())
return false;
GLWindowSetSize(width, height, fullScreen);
return true;
}
I accidentally found a solution. The problem was here:
int attribs[] =
{
WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
WGL_CONTEXT_MINOR_VERSION_ARB, 3,
WGL_CONTEXT_FLAGS_ARB, WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB,
WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_CORE_PROFILE_BIT_ARB,
0
};
Intel HD Graphics 3000 supports only OpenGL 3.1, not 3.3, so I had to change
WGL_CONTEXT_MINOR_VERSION_ARB, 3,
to
WGL_CONTEXT_MINOR_VERSION_ARB, 1,
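If you'd rather not hard-code the minor version, a descending fallback loop is a common pattern - a sketch using the already-loaded function pointer (per the WGL_ARB_create_context_profile spec, WGL_CONTEXT_PROFILE_MASK_ARB is ignored for versions below 3.2, so the same attribs work for 3.1):
// Try progressively older 3.x versions until the driver accepts one.
static const int Versions[][2] = { {3, 3}, {3, 2}, {3, 1}, {3, 0} };
g_hRC = NULL;
for (int i = 0; i < 4 && !g_hRC; ++i)
{
    int attribs[] =
    {
        WGL_CONTEXT_MAJOR_VERSION_ARB, Versions[i][0],
        WGL_CONTEXT_MINOR_VERSION_ARB, Versions[i][1],
        WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_CORE_PROFILE_BIT_ARB,
        0
    };
    g_hRC = wglCreateContextAttribsARB(g_hDC, 0, attribs);
}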
Thanks everyone, and sorry for the worry; I hope my solution will help someone.

SetPixelFormat fail when using WGL_SAMPLE_BUFFERS_ARB / WGL_SAMPLES_ARB

I decided to add MSAA support to my application, and therefore I checked what needs to be done. The examples I found always refer to WGL_SAMPLE_BUFFERS_ARB / WGL_SAMPLES_ARB in the pixel format attributes to do that. I am using GLEW right now:
int DesiredColorBits = 32;
int DesiredStencilBits = 0;
int DesiredDepthBits = 24;
int MajorVersion=3;
int MinorVersion=3;
PIXELFORMATDESCRIPTOR temppfd =
{
sizeof(PIXELFORMATDESCRIPTOR),
1,
PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
PFD_TYPE_RGBA,
DesiredColorBits,
0, 0, 0, 0, 0, 0,
0, 0,
0, 0, 0, 0, 0,
DesiredDepthBits,
0,//DesiredStencilBits,
0,
PFD_MAIN_PLANE,
0,
0, 0, 0
};
HDC TemphDC;
TemphDC = GetDC( hWnd );
int nPixelFormat = ChoosePixelFormat( TemphDC, &temppfd );
check(nPixelFormat);
verify(SetPixelFormat( TemphDC, nPixelFormat, &temppfd ));
// oldstyle context to init glew.
HGLRC tempContext = wglCreateContext(TemphDC);
wglMakeCurrent(TemphDC, tempContext);
//init glew
glewExperimental = GL_TRUE;
GLenum err = glewInit();
if (GLEW_OK != err)
errorprintf(TEXT("Error: Init glew failed: %s"), glewGetErrorString(err)));
else debugprintf( TEXT("Glew successfully initialized."));
ReleaseDC(hWnd, TemphDC);
//Now init pure OpenGL >= 3.3 context.
if (WGLEW_ARB_create_context && WGLEW_ARB_pixel_format)
{
wglMakeCurrent(NULL, NULL);
wglDeleteContext(tempContext);
PIXELFORMATDESCRIPTOR pfd;
memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR));
pfd.nSize = sizeof(PIXELFORMATDESCRIPTOR);
pfd.nVersion = 1;
pfd.dwFlags = PFD_DOUBLEBUFFER | PFD_SUPPORT_OPENGL | PFD_DRAW_TO_WINDOW;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = DesiredColorBits;
pfd.cDepthBits = DesiredDepthBits;
pfd.iLayerType = PFD_MAIN_PLANE;
int iPixelFormatAttribList[30];
iPixelFormatAttribList[0] = WGL_DRAW_TO_WINDOW_ARB;
iPixelFormatAttribList[1] = GL_TRUE;
iPixelFormatAttribList[2] = WGL_SUPPORT_OPENGL_ARB;
iPixelFormatAttribList[3] = GL_TRUE;
iPixelFormatAttribList[4] = WGL_DOUBLE_BUFFER_ARB;
iPixelFormatAttribList[5] = GL_TRUE;
iPixelFormatAttribList[6] = WGL_PIXEL_TYPE_ARB;
iPixelFormatAttribList[7] = WGL_TYPE_RGBA_ARB;
iPixelFormatAttribList[8] = WGL_COLOR_BITS_ARB;
iPixelFormatAttribList[9] = DesiredColorBits;
iPixelFormatAttribList[10] = WGL_DEPTH_BITS_ARB;
iPixelFormatAttribList[11] = DesiredDepthBits;
iPixelFormatAttribList[12] = WGL_STENCIL_BITS_ARB;
iPixelFormatAttribList[13] = 0;
iPixelFormatAttribList[14] = WGL_SAMPLE_BUFFERS_ARB;
iPixelFormatAttribList[15] = GL_TRUE;
iPixelFormatAttribList[16] = WGL_SAMPLES_ARB;
iPixelFormatAttribList[17] = NumAASamples;
iPixelFormatAttribList[18] = 0;
int iPixelFormat;
UINT iNumFormats;
wglChoosePixelFormatARB(hDC, iPixelFormatAttribList, NULL, 1, &iPixelFormat, &iNumFormats);
if( iNumFormats == 0 )
{
debugprintf(TEXT("Couldn't support multisampling"));
}
else debugprintf(TEXT("Available formats: %i %i"),iNumFormats,iPixelFormat);
int iContextAttribs[] =
{
WGL_CONTEXT_MAJOR_VERSION_ARB, MajorVersion,
WGL_CONTEXT_MINOR_VERSION_ARB, MinorVersion,
WGL_CONTEXT_FLAGS_ARB, ContextFlags,
0 // End of attributes list
};
// pfd oldstyle crap...
debugprintf(TEXT("DesiredColorBits: %i"), DesiredColorBits);
debugprintf(TEXT("DesiredDepthBits: %i"), DesiredDepthBits);
if (!SetPixelFormat(hDC, iPixelFormat, &pfd))
{
debugprintf(TEXT("Setting PixelFormat %i failed!"), iPixelFormat);
iPixelFormat = ChoosePixelFormat(hDC, &pfd);
if (!SetPixelFormat(hDC, iPixelFormat, &pfd))
errorprintf(TEXT("Error: SetPixelFormat %i failed."),iPixelFormat);
}
hRC = wglCreateContextAttribsARB(hDC, 0, iContextAttribs);
}
else errorprintf(TEXT("Error: Init glew failed: %s"), glewGetErrorString(err)));
if(hRC)
{
MakeCurrent();
debugprintf( TEXT("GL_VENDOR : %s"), glGetString(GL_VENDOR));
debugprintf( TEXT("GL_RENDERER : %s"), glGetString(GL_RENDERER));
debugprintf( TEXT("GL_VERSION : %s"), glGetString(GL_VERSION));
debugprintf( TEXT("GLEW Version : %s"), glewGetString(GLEW_VERSION));
int NumberOfAASamples=0, NumberOfSampleBuffers=0;
glGetIntegerv(GL_SAMPLE_BUFFERS, &NumberOfSampleBuffers);
glGetIntegerv(GL_SAMPLES, &NumberOfAASamples);
debugprintf(TEXT("SampleBuffers: %i, NumAASamples: (%i/%i)"), NumberOfSampleBuffers, NumberOfAASamples, NumAASamples);
int NumberOfExtensions=0;
glGetIntegerv(GL_NUM_EXTENSIONS, &NumberOfExtensions);
for (int i = 0; i<NumberOfExtensions; i++)
{
FString ExtensionString = (const char*)glGetStringi(GL_EXTENSIONS, i);
debugprintf(NAME_DevLoad, TEXT("GL_EXTENSIONS(%i) : %s"), i, ExtensionString);
}
debugprintf( TEXT("OpenGL %i.%i context initialized!"), MajorVersion,MinorVersion);
}
So far, so good: I get my temp context to init GLEW, it initializes correctly and is ready to use, and there are no problems with SetPixelFormat either - without sample buffers, at least.
Now, if I add WGL_SAMPLE_BUFFERS_ARB and WGL_SAMPLES_ARB, I still get a pixel format back from wglChoosePixelFormatARB (indeed, iNumFormats returns 32), but SetPixelFormat always fails with the pixel format returned from it.
I tried various color and depth bits, but no matter what I do, I can't seem to find a way to make this work. I also tried this on my NVidia card and on an Intel card, and no AA preset is forced either.
Now - is there a different way to get this result? I take it that SetPixelFormat is "deprecated", partially even obsolete from what I read, yet I seem to need it here to enable MSAA, which is a somewhat annoying dependency.
Did I miss something?
I solved it by changing one thing: I created a completely standalone window for the GLEW init, like this:
LRESULT CALLBACK WndProc(HWND hWnd, UINT uiMsg, WPARAM wParam, LPARAM lParam)
{
switch(uiMsg)
{
case WM_CLOSE:
PostQuitMessage(0);
break;
default:
return DefWindowProc(hWnd, uiMsg, wParam, lParam);
}
return 0;
}
PIXELFORMATDESCRIPTOR temppfd =
{
sizeof(PIXELFORMATDESCRIPTOR),
1,
PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
PFD_TYPE_RGBA,
DesiredColorBits,
0, 0, 0, 0, 0, 0,
0, 0,
0, 0, 0, 0, 0,
DesiredDepthBits,
0,//DesiredStencilBits,
0,
PFD_MAIN_PLANE,
0,
0, 0, 0
};
DWORD Style = WS_OVERLAPPEDWINDOW | WS_CLIPSIBLINGS | WS_CLIPCHILDREN;
WNDCLASSEX WndClassEx;
memset(&WndClassEx, 0, sizeof(WNDCLASSEX));
WndClassEx.cbSize = sizeof(WNDCLASSEX);
WndClassEx.style = CS_OWNDC | CS_HREDRAW | CS_VREDRAW;
WndClassEx.lpfnWndProc = WndProc;
WndClassEx.hInstance = hInstance;
WndClassEx.hIcon = LoadIcon(NULL, IDI_APPLICATION);
WndClassEx.hIconSm = LoadIcon(NULL, IDI_APPLICATION);
WndClassEx.hCursor = LoadCursor(NULL, IDC_ARROW);
WndClassEx.lpszClassName = L"Win32OpenGLWindow";
if(RegisterClassEx(&WndClassEx) == 0)
{
debugprintf(TEXT("RegisterClassEx failed!"));
}
HWND TemphWnd = CreateWindowEx(WS_EX_APPWINDOW, WndClassEx.lpszClassName, L"InitWIndow", Style, 0, 0, SizeX, SizeY, NULL, NULL, hInstance, NULL);
HDC TemphDC = GetDC( TemphWnd );
INT nPixelFormat = ChoosePixelFormat( TemphDC, &temppfd );
check(nPixelFormat);
verify(SetPixelFormat( TemphDC, nPixelFormat, &temppfd ));
// oldstyle context to init glew.
HGLRC tempContext = wglCreateContext(TemphDC);
wglMakeCurrent(TemphDC, tempContext);
//init glew
glewExperimental = GL_TRUE;
GLenum err = glewInit();
if (GLEW_OK != err)
errorprintf(TEXT("Error: Init glew failed: %s"),glewGetErrorString(err));
else debugprintf(TEXT("Glew successfully initialized."));
//Now init pure OpenGL >= 3.3 context.
if (WGLEW_ARB_create_context && WGLEW_ARB_pixel_format)
{
wglMakeCurrent(NULL, NULL);
wglDeleteContext(tempContext);
ReleaseDC(TemphWnd, TemphDC);
DestroyWindow(TemphWnd);
...
After destroying this temp window and then using the actual one for the game as shown above, it was possible to use SetPixelFormat with the new options. Why it didn't cause trouble like this when not adding WGL_SAMPLE_BUFFERS_ARB / WGL_SAMPLES_ARB, I don't know. I assume that a window's pixel format cannot really be changed once it has been set, no matter which hWnd, hDC or context is involved, and regardless of whether those are destroyed or renewed in between. Many examples and tutorials on the web are not clear about that, and some indeed show it the wrong way, too.
In short: create a temp window, temp hWnd and temp hDC, then init GLEW, destroy all the temporary stuff, and then go on to create the context you really want with the real window, hWnd and hDC - otherwise you'll buy yourself some trouble.
Yes, I think it is a real mess to get a clean context on Windows.
Either way, I hope this will help others.
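As a final sanity check once the new context is current, it may be worth confirming that multisampling is actually active - a short sketch (GL_MULTISAMPLE is enabled by default per the spec, but being explicit costs nothing):
glEnable(GL_MULTISAMPLE);            // rasterize using the multisample buffer
GLint Samples = 0;
glGetIntegerv(GL_SAMPLES, &Samples);
// Samples should now report the WGL_SAMPLES_ARB value that was requested.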