I want to set up MSAA on an OpenGL context using the Win32 API. Everything else works fine, but the MSAA just doesn't want to activate. Here is my code for building the context:
void Display::CreateGLContext(HWND hWND) {
mHDC = GetDC(hWND); //get current windows device context
int nPixelFormat;
PIXELFORMATDESCRIPTOR pfd; // Create a new PIXELFORMATDESCRIPTOR (PFD)
memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR)); // Clear our PFD
pfd.nSize = sizeof(PIXELFORMATDESCRIPTOR); // Set the size of the PFD to the size of the class
pfd.dwFlags = PFD_DOUBLEBUFFER | PFD_SUPPORT_OPENGL | PFD_DRAW_TO_WINDOW; // Enable double buffering, opengl support and drawing to a window
pfd.iPixelType = PFD_TYPE_RGBA; // Set our application to use RGBA pixels
pfd.cColorBits = 32; // Give us 32 bits of color information (the higher, the more colors)
pfd.cDepthBits = 16; // Give us 16 bits of depth information (the higher, the more depth levels)
pfd.iLayerType = PFD_MAIN_PLANE; // Set the layer of the PFD
/* Choose best matching format*/
nPixelFormat = ChoosePixelFormat(mHDC, &pfd);
/* Set the pixel format to the device context*/
SetPixelFormat(mHDC, nPixelFormat, &pfd);
HGLRC tempRC = wglCreateContext(mHDC);
wglMakeCurrent(mHDC, tempRC);
if (glewInit() != GLEW_OK) {
MessageBox(mHWND, "Error", "glew", MB_OK);
}
int nPixelFormat2;
BOOL bValidPixFormat;
UINT nMaxFormats = 1;
UINT nNumFormats;
float pfAttribFList[] = { 0, 0 };
int piAttribIList[] = {
WGL_DRAW_TO_WINDOW_ARB, GL_TRUE,
WGL_SUPPORT_OPENGL_ARB, GL_TRUE,
WGL_COLOR_BITS_ARB, 32,
WGL_RED_BITS_ARB, 8,
WGL_GREEN_BITS_ARB, 8,
WGL_BLUE_BITS_ARB, 8,
WGL_ALPHA_BITS_ARB, 8,
WGL_DEPTH_BITS_ARB, 16,
WGL_STENCIL_BITS_ARB, 0,
WGL_DOUBLE_BUFFER_ARB, GL_TRUE,
WGL_PIXEL_TYPE_ARB, WGL_TYPE_RGBA_ARB,
WGL_SAMPLE_BUFFERS_ARB, GL_TRUE,
WGL_SAMPLES_ARB, 16,
0, 0 };
bValidPixFormat = wglChoosePixelFormatARB(mHDC, piAttribIList, pfAttribFList, nMaxFormats, &nPixelFormat2, &nNumFormats);
if (!bValidPixFormat)
{
MessageBox(NULL, "Invalid Pixel Format", "Error! (SetupWGLPixelFormat)", MB_OK);
}
SetPixelFormat(mHDC, nPixelFormat2, &pfd);
mGLRenderContext = wglCreateContext(mHDC);
wglMakeCurrent(mHDC, NULL);
wglDeleteContext(tempRC);
wglMakeCurrent(mHDC, mGLRenderContext);
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
}
The code runs without errors; it is called after creating hWnd in the main class, not from WndProc in the WM_CREATE handler... What can be wrong?
I see the part where you asked for 16 samples, but I don't see the part where you enabled GL_MULTISAMPLE. Without it, rendering to a multisampled buffer will act no differently from rendering to a single-sampled one.
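A minimal sketch of the missing piece, to be called once the final context is current (GL_MULTISAMPLE is standard desktop GL; many drivers default it to enabled, but enabling it explicitly costs nothing):
glEnable(GL_MULTISAMPLE); // rasterize into all samples of the multisampled framebuffer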
Also, I would advise you to use a framebuffer object for your multisample render target instead of the default framebuffer. Yes, it's nice that the default framebuffer can be resized by the window. But by using a framebuffer object, you can control when multisampling is resolved.
Also, it allows you to keep the driver's pesky control panel options from messing with your sample counts ;)
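As a sketch of that approach (assuming GLEW is already loaded and width/height hold the client area size; the msFbo/msColor/msDepth names are placeholders, not from the original code):
GLuint msFbo, msColor, msDepth;
glGenFramebuffers(1, &msFbo);
glGenRenderbuffers(1, &msColor);
glGenRenderbuffers(1, &msDepth);
glBindRenderbuffer(GL_RENDERBUFFER, msColor);
glRenderbufferStorageMultisample(GL_RENDERBUFFER, 16, GL_RGBA8, width, height); // 16x color buffer
glBindRenderbuffer(GL_RENDERBUFFER, msDepth);
glRenderbufferStorageMultisample(GL_RENDERBUFFER, 16, GL_DEPTH_COMPONENT24, width, height); // matching 16x depth buffer
glBindFramebuffer(GL_FRAMEBUFFER, msFbo);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, msColor);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, msDepth);
// ... render the scene into msFbo ...
// Resolve the samples at a time of your choosing by blitting to the default framebuffer:
glBindFramebuffer(GL_READ_FRAMEBUFFER, msFbo);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
glBlitFramebuffer(0, 0, width, height, 0, 0, width, height, GL_COLOR_BUFFER_BIT, GL_NEAREST);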
I have found the problem. Basically, you can't call SetPixelFormat twice on the same window, as this article mentions: https://www.khronos.org/opengl/wiki/Creating_an_OpenGL_Context_(WGL)#Proper_Context_Creation
The solution is to create an invisible dummy window, create an OpenGL context on it to load the WGL extension functions, and then delete it. I copied the code from "Create Modern OpenGL context using WGL?" and it worked for me.
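For reference, a condensed sketch of that technique (error handling omitted; the use of the "STATIC" class for the throwaway window and the final-context details are assumptions, not code from the linked answer):
// 1) Dummy window, used only to get a legacy context for loading WGL extensions.
HWND dummyWnd = CreateWindow("STATIC", "", WS_OVERLAPPEDWINDOW, 0, 0, 1, 1,
                             NULL, NULL, GetModuleHandle(NULL), NULL);
HDC dummyDC = GetDC(dummyWnd);
PIXELFORMATDESCRIPTOR pfd = { sizeof(pfd), 1,
    PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
    PFD_TYPE_RGBA, 32 };
SetPixelFormat(dummyDC, ChoosePixelFormat(dummyDC, &pfd), &pfd);
HGLRC dummyRC = wglCreateContext(dummyDC);
wglMakeCurrent(dummyDC, dummyRC);
glewInit(); // now wglChoosePixelFormatARB and friends are available
// 2) Tear the dummy down; its window has had its one SetPixelFormat call used up.
wglMakeCurrent(NULL, NULL);
wglDeleteContext(dummyRC);
ReleaseDC(dummyWnd, dummyDC);
DestroyWindow(dummyWnd);
// 3) Pick the multisampled format with wglChoosePixelFormatARB, call SetPixelFormat
//    exactly once on the real window's DC, and create the real context there.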
I am trying to render a 2D image using OpenGL (for rendering) and DevIL (for loading the image), but nothing gets rendered. Upon error checking, I found that OpenGL reports an Invalid Operation error on the wglMakeCurrent call.
The following is my initialization function:
void CImageMainView::InitializeOpenGL()
{
m_pDC = new CClientDC(this);
m_hDC = m_pDC->GetSafeHdc();
SetupPixelFormat();
m_hRC = ::wglCreateContext(m_hDC);
BOOL ret = ::wglMakeCurrent(m_hDC, m_hRC);
if(!ret){
printf("Error making current context\n");
}
printf("wglMakeCurrent ");
CheckGLError();
GetOpenGLExtendedInformation();
::wglMakeCurrent(NULL, NULL);
printf("wglMakeCurrent to null");
CheckGLError();
return;
}
The SetupPixelFormat function looks like the following:
void CImageMainView::SetupPixelFormat()
{
static PIXELFORMATDESCRIPTOR pfd =
{
sizeof(PIXELFORMATDESCRIPTOR), // size of this pfd
1, // version number
PFD_DRAW_TO_WINDOW | // support window
PFD_SUPPORT_OPENGL | // support OpenGL
PFD_DOUBLEBUFFER, // double buffered
PFD_TYPE_RGBA, // RGBA type
24, // 24-bit color depth
0, 0, 0, 0, 0, 0, // color bits ignored
0, // no alpha buffer
0, // shift bit ignored
0, // no accumulation buffer
0, 0, 0, 0, // accum bits ignored
32, // 32-bit z-buffer
0, // no stencil buffer
0, // no auxiliary buffer
PFD_MAIN_PLANE, // main layer
0, // reserved
0, 0, 0 // layer masks ignored
};
m_PixelFormat = ::ChoosePixelFormat(m_hDC, &pfd);
::SetPixelFormat(m_hDC, m_PixelFormat, &pfd);
return;
}
and lastly, the error checking function is:
void CImageMainView::CheckGLError()
{
const GLenum err = glGetError();
printf("GLError: %s\n", gluErrorString(err));
}
I do have another View in my application that renders 3D stuff using OpenGL. Could that be the reason?
I have the following code:
void DrawGLScene(unsigned char *drawing_bytes, HDC hdc, int xWidth, int yWidth) {
if ((!xWidth) || (!yWidth)) return;
BOOL returnVal = wglMakeCurrent(hdc, hrc);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, xWidth, yWidth, 0, GL_BGR_EXT, GL_UNSIGNED_BYTE, drawing_bytes);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST);
glViewport(0,0,xWidth,yWidth); // Reset The Current Viewport
glMatrixMode(GL_PROJECTION); // Select The Projection Matrix
glLoadIdentity(); // Reset The Projection Matrix
// Calculate The Aspect Ratio Of The Window
gluPerspective(25.0f,1.0f,0.1f,100.0f);
glMatrixMode(GL_MODELVIEW); // Select The Modelview Matrix
glLoadIdentity(); // Reset The Modelview Matrix
glEnable(GL_TEXTURE_2D); // Enable Texture Mapping
glShadeModel(GL_SMOOTH); // Enable Smooth Shading
glDisable(GL_DEPTH_TEST); // Disable Depth Testing
glDepthFunc(GL_LEQUAL); // The Type Of Depth Testing To Do
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST); // Really Nice Perspective Calculations
glLoadIdentity(); // Reset The View
glTranslatef(0.0f,0.0f,-5.0f);
glBindTexture(GL_TEXTURE_2D, texture);
glColor4f(1.0, 1.0, 1.0, 1.0);
glBegin(GL_QUADS);
// Front Face
glTexCoord2f(0.0f, 0.0f); glVertex3f(-1.0f, -1.0f, 0.5f);
glTexCoord2f(1.0f, 0.0f); glVertex3f( 1.0f, -1.0f, 0.5f);
glTexCoord2f(1.0f, 1.0f); glVertex3f( 1.0f, 1.0f, 0.5f);
glTexCoord2f(0.0f, 1.0f); glVertex3f(-1.0f, 1.0f, 0.5f);
glEnd();
SwapBuffers(hdc); // the Win32 call is SwapBuffers; there is no glSwapBuffers
}
This code overwrites my buttons created earlier via
hInstallButton = CreateWindow(TEXT("button"), "",
WS_VISIBLE | WS_CHILD | BS_AUTOCHECKBOX,
137, 70, 13, 13,
hWnd, (HMENU) 1, GetModuleHandle(NULL), NULL);
The issue is the SwapBuffers() call, which hides the buttons for good.
This is generated by the PIXELFORMATDESCRIPTOR:
static PIXELFORMATDESCRIPTOR pfd= // pfd Tells Windows How We Want Things To Be
{
sizeof(PIXELFORMATDESCRIPTOR), // Size Of This Pixel Format Descriptor
1, // Version Number
PFD_DRAW_TO_WINDOW | // Format Must Support Window
PFD_SUPPORT_OPENGL | // Format Must Support OpenGL
0, // No PFD_DOUBLEBUFFER flag: single buffered
PFD_TYPE_RGBA, // Request An RGBA Format
24, // Select Our Color Depth
0, 0, 0, 0, 0, 0, // Color Bits Ignored
0, // No Alpha Buffer
0, // Shift Bit Ignored
0, // No Accumulation Buffer
0, 0, 0, 0, // Accumulation Bits Ignored
16, // 16Bit Z-Buffer (Depth Buffer)
0, // No Stencil Buffer
0, // No Auxiliary Buffer
PFD_MAIN_PLANE, // Main Drawing Layer
0, // Reserved
0, 0, 0 // Layer Masks Ignored
};
How can I force a single buffer, or somehow write the buttons to both buffers? I am at a loss here and don't know how to do this properly (except maybe recreating the buttons on each WM_PAINT call).
Edit:
I tried it with a subwindow (see code), but it creates a second top-level window instead of embedding itself into the first window.
BOOL InitInstance(HINSTANCE hInstance, int nCmdShow)
{
HWND hWnd;
hInst = hInstance; // store the instance handle in the global variable
DWORD dwExStyle; // Window Extended Style
DWORD dwStyle; // Window Style
hWnd = CreateWindow(szWindowClass, szTitle, WS_OVERLAPPEDWINDOW,
CW_USEDEFAULT, 0, CW_USEDEFAULT, 0, NULL, NULL, hInstance, NULL);
if (!hWnd) {
return FALSE;
}
dwExStyle = WS_EX_APPWINDOW | WS_EX_WINDOWEDGE; // Window Extended Style (CS_OWNDC is a class style, not an extended window style; it belongs in WNDCLASS.style below)
dwStyle=WS_VISIBLE | WS_CLIPSIBLINGS | WS_CLIPCHILDREN; // Windows Style
WNDCLASS wndClass;
wndClass.style = CS_OWNDC | CS_HREDRAW | CS_VREDRAW;
wndClass.lpfnWndProc = WndProc;
wndClass.cbClsExtra = 0;
wndClass.cbWndExtra = 0;
wndClass.hInstance = hInstance;
wndClass.hIcon = LoadIcon(NULL, IDI_APPLICATION);
wndClass.hCursor = LoadCursor(NULL, IDC_ARROW);
wndClass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); // BLACK_BRUSH is a stock object index, not a COLORREF for CreateSolidBrush
wndClass.lpszMenuName = NULL;
wndClass.lpszClassName = "Test Window";
RegisterClass(&wndClass);
hWndOpenGL = CreateWindowEx( dwExStyle, // Extended Style For The Window
"Test Window", // Class Name
"Testy test", // Window Title
dwStyle, // Required Window Style
0, 0, // Window Position
800,
600,
hWnd, // Parent Window
NULL, // No Menu
hInstance, // Instance
NULL);
//CreateWindow(szWindowClass, szTitle, WS_OVERLAPPEDWINDOW,
//CW_USEDEFAULT, 0, CW_USEDEFAULT-500, 0, hWnd, NULL, hInstance, NULL);
static PIXELFORMATDESCRIPTOR pfd= // pfd Tells Windows How We Want Things To Be
{
sizeof(PIXELFORMATDESCRIPTOR), // Size Of This Pixel Format Descriptor
1, // Version Number
PFD_DRAW_TO_WINDOW | // Format Must Support Window
PFD_SUPPORT_OPENGL | // Format Must Support OpenGL
0, // No PFD_DOUBLEBUFFER flag: single buffered
PFD_TYPE_RGBA, // Request An RGBA Format
24, // Select Our Color Depth
0, 0, 0, 0, 0, 0, // Color Bits Ignored
0, // No Alpha Buffer
0, // Shift Bit Ignored
0, // No Accumulation Buffer
0, 0, 0, 0, // Accumulation Bits Ignored
16, // 16Bit Z-Buffer (Depth Buffer)
0, // No Stencil Buffer
0, // No Auxiliary Buffer
PFD_MAIN_PLANE, // Main Drawing Layer
0, // Reserved
0, 0, 0 // Layer Masks Ignored
};
hdcOpenGL=GetDC(hWndOpenGL);
GLuint PixelFormat; // Holds The Results After Searching For A Match
PixelFormat=ChoosePixelFormat(hdcOpenGL,&pfd);
SetPixelFormat(hdcOpenGL,PixelFormat,&pfd);
hrc=wglCreateContext(hdcOpenGL);
ShowWindow(hWnd, nCmdShow);
UpdateWindow(hWnd);
ShowWindow(hWndOpenGL, nCmdShow);
UpdateWindow(hWndOpenGL);
return TRUE;
}
I'm guessing that you created the buttons as children of the OpenGL window. If you did, then you did something that the WGL and Win32 API documentation explicitly mentions will break things.
The fix is simple: the OpenGL window should be a sibling of the buttons and have its very own DC. Create a separate subwindow for OpenGL operations with the CS_OWNDC window class flag and the WS_CLIPSIBLINGS | WS_CLIPCHILDREN window styles set. Both the OpenGL subwindow and the buttons are created with the desired container window as their parent.
That way the buttons will not get clobbered by OpenGL operations, even with a double buffered pixelformat.
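A sketch of that arrangement, where hWndContainer stands for the main window and the "GLView" class name is a placeholder (both are assumptions, not names from the question):
// Register a window class for the OpenGL view with its own DC.
WNDCLASS wc = { 0 };
wc.style = CS_OWNDC;
wc.lpfnWndProc = DefWindowProc;
wc.hInstance = hInstance;
wc.lpszClassName = "GLView";
RegisterClass(&wc);
// The OpenGL window is a child of the container, making it a sibling of the buttons.
HWND hWndGL = CreateWindowEx(0, "GLView", NULL,
    WS_CHILD | WS_VISIBLE | WS_CLIPSIBLINGS | WS_CLIPCHILDREN,
    0, 0, 800, 600, hWndContainer, NULL, hInstance, NULL);
// The buttons are children of the container too, never of the GL window.
HWND hButton = CreateWindow("button", "",
    WS_VISIBLE | WS_CHILD | BS_AUTOCHECKBOX,
    137, 70, 13, 13, hWndContainer, (HMENU)1, hInstance, NULL);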
I am trying to create a simple ActiveX control using OpenGL. I add some styles in PreCreateWindow:
BOOL CMFCActiveXControl1Ctrl::PreCreateWindow(CREATESTRUCT& cs) {
cs.style |= WS_CLIPSIBLINGS | WS_CLIPCHILDREN;
cs.lpszClass = _T("STATIC");
return COleControl::PreCreateWindow(cs);
}
Initialization of OpenGL:
int CMFCActiveXControl1Ctrl::OnCreate(LPCREATESTRUCT lpCreateStruct) {
PIXELFORMATDESCRIPTOR pfd = { 0 };
pfd.nSize = sizeof(PIXELFORMATDESCRIPTOR);
pfd.nVersion = 1;
pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_DOUBLEBUFFER | PFD_SUPPORT_OPENGL;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = 24;
pfd.cDepthBits = 16;
pfd.iLayerType = PFD_MAIN_PLANE;
hDC = ::GetDC(m_hWnd);
int format = ChoosePixelFormat(hDC, &pfd);
SetPixelFormat(hDC, format, &pfd);
hRC = wglCreateContext(hDC);
wglMakeCurrent(hDC, hRC);
return 0;
}
And then I try to clear the color buffer with red, but all I see is just a black square:
void CMFCActiveXControl1Ctrl::OnDraw(
CDC* pdc, const CRect& rcBounds, const CRect& rcInvalid)
{
if (!pdc)
return;
glClearColor(1, 0, 0, 0);
SwapBuffers(wglGetCurrentDC());
}
glClearColor(...) does not actually clear the color buffer; it just sets the color that will be used when you call glClear(...).
There is a newer function in GL3, glClearBuffer(...), that can be used to clear a buffer to an explicit value in a single call, but ordinarily you are going to need to call glClear(GL_COLOR_BUFFER_BIT) after setting the clear color instead.
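Applied to the OnDraw above, a minimal sketch (assuming hDC and hRC are the members set up in OnCreate):
void CMFCActiveXControl1Ctrl::OnDraw(
    CDC* pdc, const CRect& rcBounds, const CRect& rcInvalid)
{
    if (!pdc)
        return;
    wglMakeCurrent(hDC, hRC);     // make sure our context is current for this draw
    glClearColor(1, 0, 0, 0);     // set the clear color to red...
    glClear(GL_COLOR_BUFFER_BIT); // ...and actually clear the color buffer with it
    SwapBuffers(hDC);             // show the cleared back buffer
}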
I'm trying to create a popup window with half transparency that renders content on itself with DirectX.
The problem is that the background fails to redraw itself, but only while rendering is enabled. A redraw happens only when something updates (i.e., when I select a line in a text editor behind my popup window).
The magic begins when my window gets moved to the secondary monitor: there everything is fine. Transparency works perfectly and the background redraws constantly. Also, if the popup steps outside the display borders, transparency begins to work. (Screenshots below.)
The OS is Windows XP SP3 with DirectX 9.0c and an NVIDIA graphics card with the latest drivers.
I also tested the program on Windows Vista and Windows 7 with several different video cards; it works perfectly there.
Creating the window:
m_popup = new popup(__("pew!"), wxPoint(600, 330), wxSize(250, 250));
m_popup->Show(true);
m_popup->SetWindowStyle(wxSTAY_ON_TOP);
m_popup->SetTransparent(150);
SetTopWindow(m_popup);
Transparency code from wxWidgets (2.8.12):
bool wxTopLevelWindowMSW::SetTransparent(wxByte alpha)
{
typedef DWORD (WINAPI *PSETLAYEREDWINDOWATTR)(HWND, DWORD, BYTE, DWORD);
static PSETLAYEREDWINDOWATTR pSetLayeredWindowAttributes = NULL;
if ( pSetLayeredWindowAttributes == NULL )
{
wxDynamicLibrary dllUser32(_T("user32.dll"));
pSetLayeredWindowAttributes = (PSETLAYEREDWINDOWATTR)
dllUser32.GetSymbol(wxT("SetLayeredWindowAttributes"));
}
if ( pSetLayeredWindowAttributes == NULL )
return false;
LONG exstyle = GetWindowLong(GetHwnd(), GWL_EXSTYLE);
// if setting alpha to fully opaque then turn off the layered style
if (alpha == 255)
{
SetWindowLong(GetHwnd(), GWL_EXSTYLE, exstyle & ~WS_EX_LAYERED);
Refresh();
return true;
}
// Otherwise, set the layered style if needed and set the alpha value
if ((exstyle & WS_EX_LAYERED) == 0 )
SetWindowLong(GetHwnd(), GWL_EXSTYLE, exstyle | WS_EX_LAYERED);
// ^ this line seems to cause the problem
// (tried to make the window transparent manually without wxWidgets' help)
return pSetLayeredWindowAttributes(GetHwnd(), 0, (BYTE)alpha, LWA_ALPHA) != 0;
}
DirectX initialization:
m_d3d = Direct3DCreate9(D3D_SDK_VERSION);
D3DPRESENT_PARAMETERS d3dpp;
ZeroMemory(&d3dpp, sizeof(d3dpp));
d3dpp.Windowed = TRUE;
d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
d3dpp.hDeviceWindow = hWnd;
d3dpp.BackBufferFormat = D3DFMT_A8R8G8B8;
d3dpp.BackBufferWidth = g_size;
d3dpp.BackBufferHeight = g_size;
m_d3d->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, hWnd,
D3DCREATE_SOFTWARE_VERTEXPROCESSING, &d3dpp, &m_d3ddev);
CUSTOMVERTEX vertices[] =
{
{ 320.0f, 50.0f, 0.5f, 1.0f, D3DCOLOR_ARGB(150, 255, 150, 150), },
{ 520.0f, 400.0f, 0.5f, 1.0f, D3DCOLOR_ARGB(150, 150, 255, 150), },
{ 120.0f, 400.0f, 0.5f, 1.0f, D3DCOLOR_ARGB(150, 150, 150, 255), },
};
m_d3ddev->CreateVertexBuffer(3*sizeof(CUSTOMVERTEX),
0,
CUSTOMFVF,
D3DPOOL_MANAGED,
&v_buffer,
NULL);
VOID* pVoid;
v_buffer->Lock(0, 0, (void**)&pVoid, 0);
memcpy(pVoid, vertices, sizeof(vertices));
v_buffer->Unlock();
Rendering:
if (m_render)
{
m_d3ddev->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_ARGB(150, 150, 150, 200), 1.0f, 0);
m_d3ddev->BeginScene();
m_d3ddev->SetFVF(CUSTOMFVF);
m_d3ddev->SetStreamSource(0, v_buffer, 0, sizeof(CUSTOMVERTEX));
m_d3ddev->DrawPrimitive(D3DPT_TRIANGLELIST, 0, 1);
m_d3ddev->EndScene();
m_d3ddev->Present(NULL, NULL, NULL, NULL);
}
Screenshots:
Transparency fail: http://clip2net.com/s/5IHAyQ
Transparency is ok when popup is out of display borders: http://clip2net.com/s/5IHCI3
I also wanted to post a screenshot of how it works on the secondary monitor, but I can't attach it, nor can I post images directly to SO because of rep. Just imagine that everything is fine there, just like in the second screenshot.
Thank you.
PARTIALLY SOLVED, see comments.
I am trying to enable AA in a D3D9 application, but am not sure how to set up the surfaces correctly. So far, I have:
IDirect3DDevice9* m_pd3dDevice;
IDirect3DSurface9* screen;
IDirect3DSurface9* msaasurf;
D3DPRESENT_PARAMETERS m_presentationParameters;
Initialization:
m_presentationParameters.Windowed = TRUE;
m_presentationParameters.SwapEffect = D3DSWAPEFFECT_DISCARD;
m_presentationParameters.MultiSampleType = D3DMULTISAMPLE_2_SAMPLES;
m_presentationParameters.MultiSampleQuality = 0;
m_presentationParameters.BackBufferFormat = D3DFMT_UNKNOWN;
m_presentationParameters.EnableAutoDepthStencil = TRUE;
m_presentationParameters.AutoDepthStencilFormat = D3DFMT_D16;
m_presentationParameters.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
// create d3d device
m_pD3D->CreateDevice(
D3DADAPTER_DEFAULT,
D3DDEVTYPE_HAL,
hWnd,
D3DCREATE_HARDWARE_VERTEXPROCESSING,
&m_presentationParameters, &m_pd3dDevice
);
// save screen surface
m_pd3dDevice->GetRenderTarget(0, &screen);
D3DSURFACE_DESC desc;
screen->GetDesc(&desc);
// Create multisample render target
m_pd3dDevice->CreateRenderTarget(
800, 600,
D3DFMT_A8R8G8B8,
desc.MultiSampleType, desc.MultiSampleQuality,
false,
&msaasurf,
NULL
);
And then, for each frame:
// render to multisample surface
m_pd3dDevice->SetRenderTarget(0, msaasurf);
m_pd3dDevice->Clear(0, NULL, D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER, D3DCOLOR_XRGB( 0, 0, 0 ), 1.0f, 0 );
m_pd3dDevice->BeginScene();
// render stuff here
m_pd3dDevice->EndScene();
m_pd3dDevice->SetRenderTarget(0, screen);
// get back buffer
IDirect3DSurface9* backBuffer = NULL;
m_pd3dDevice->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &backBuffer);
// copy rendertarget to backbuffer
m_pd3dDevice->StretchRect(msaasurf, NULL, backBuffer, NULL, D3DTEXF_NONE);
backBuffer->Release();
// Present the backbuffer contents to the display
m_pd3dDevice->Present(NULL, NULL, NULL, NULL);
However, nothing appears on my screen (all black). No errors occur (I check the return value of all D3D calls). What am I doing wrong?
You don't need the extra surface; you can render directly to the multisampled backbuffer. For me, the only reason to use StretchRect() like this is to get a non-multisampled copy of the scene for use in postprocessing (multisampled render targets can't be sampled as textures, so you need the scene data in a resolved texture). If you want to do that, you don't need to specify multisampling for the backbuffer; a multisampled render target to render the scene into is sufficient.
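A sketch of the direct approach, reusing the member names from the question and assuming the device supports 2x MSAA (which is worth verifying with CheckDeviceMultiSampleType before creating the device):
// Verify the format/sample-count combination before relying on it.
DWORD quality = 0;
if (SUCCEEDED(m_pD3D->CheckDeviceMultiSampleType(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL,
        D3DFMT_X8R8G8B8, TRUE, D3DMULTISAMPLE_2_SAMPLES, &quality)))
{
    m_presentationParameters.MultiSampleType = D3DMULTISAMPLE_2_SAMPLES;
    m_presentationParameters.MultiSampleQuality = 0;
}
// A multisampled backbuffer requires D3DSWAPEFFECT_DISCARD (already set above).
// Per frame, render straight into the backbuffer; no extra surface or StretchRect:
m_pd3dDevice->Clear(0, NULL, D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER, D3DCOLOR_XRGB(0, 0, 0), 1.0f, 0);
m_pd3dDevice->BeginScene();
// ... render stuff here ...
m_pd3dDevice->EndScene();
m_pd3dDevice->Present(NULL, NULL, NULL, NULL); // the resolve happens automatically on present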