Dereferencing a pointer to a handle is giving me "illegal, right operand.."? - C++

I don't understand what the compiler is trying to enforce. I have a function that sets up an OpenGL context, and returns the HGLRC via a pointer:
BOOL SetupWin32Context(HDC hDC, HGLRC *phRC) {
    /* do bunch of work */
    HGLRC hRC = wglCreateContext(hDC);
    *phRC = hRC;
    return TRUE;
}
On *phRC = hRC; I get:
error C2297: '*' : illegal, right operand has type 'HGLRC *'
And that's not making any sense to me.
Full code copied from pastebin
#include "Bindings.h"
#include <Windows.h>
#include <gl\GL.h>
BOOL SetupWin32Context(HDC hDC, HGLRC *phRC)
{
    HGLRC hRC = NULL;
    PIXELFORMATDESCRIPTOR pfd =
    {
        sizeof(PIXELFORMATDESCRIPTOR),
        1,
        PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
        PFD_TYPE_RGBA,
        32,             /* Colordepth of the framebuffer. */
        0, 0, 0, 0, 0, 0,
        0,
        0,
        0,
        0, 0, 0, 0,
        0,              /* Bits of the depthbuffer. */
        0,              /* Bits of the stencilbuffer. */
        0,              /* Number of Aux buffers. */
        PFD_MAIN_PLANE,
        0,
        0, 0, 0
    };
    int format = ChoosePixelFormat(hDC, &pfd);
    if(format == 0) return FALSE;
    if(!SetPixelFormat(hDC, format, &pfd)) return FALSE;
    hRC = wglCreateContext(hDC);
    if(hRC == 0)
        hRC = wglCreateContext(hDC);
    if(hRC == 0)
        return FALSE;
    wglMakeCurrent(hDC, hRC)
    *phRC = hRC;
    return TRUE;
}

In the real code, you are missing a semicolon after
wglMakeCurrent(hDC, hRC)
Without it, the compiler joins that call and the next line into a single expression, so the * is treated as a multiplication whose right operand phRC has type HGLRC *, which is exactly what error C2297 reports. It needs to be:
wglMakeCurrent(hDC, hRC);
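For illustration, this is what the compiler effectively sees once the semicolon is gone (the combined statement is not in the original code, it is just the parse written out):

// The two source lines
wglMakeCurrent(hDC, hRC)
*phRC = hRC;
// are parsed as one statement:
wglMakeCurrent(hDC, hRC) * phRC = hRC;
// '*' is now binary multiplication and its right operand phRC has type HGLRC *,
// hence "error C2297: '*' : illegal, right operand has type 'HGLRC *'".

Since wglMakeCurrent returns a BOOL, the compiler is being asked to multiply a BOOL by a pointer, which is not a valid expression.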

Related

Calling glClear multiple times without swapping the buffer causes a memory leak (Windows MSVC)

When I call glClear without swapping the buffer, it causes a memory leak.
I am not sure if this is a driver issue or if I am doing something wrong.
If anyone can give it a try or has any idea, please tell me.
TL;DR: this is the code, summarized. You can find the full code after this block.
static bool CloseFlag = false;
LONG WINAPI WindowProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
static PAINTSTRUCT ps;
switch(uMsg)
{
case WM_SIZE:
glViewport(0, 0, LOWORD(lParam), HIWORD(lParam));
PostMessage(hWnd, WM_PAINT, 0, 0);
return 0;
case WM_CHAR:
//Handle input....
return 0;
case WM_CLOSE:
CloseFlag = true;
return 0;
}
return DefWindowProc(hWnd, uMsg, wParam, lParam);
}
HWND CreateOpenGLWindow(char* title, int x, int y, int width, int height,
BYTE type, DWORD flags)
{
//Create window (with double buffer) and register it...
}
int main ()
{
hWnd = CreateOpenGLWindow("minimal", 0, 0, 256, 256, PFD_TYPE_RGBA, 0);
hRC = wglCreateContext(hDC);
wglMakeCurrent(hDC, hRC);
gladLoadWGL(hDC);
gladLoadGL();
ShowWindow(hWnd, SW_SHOW);
while(!CloseFlag)
{
//Handles input
while(PeekMessage(&msg, nullptr, 0, 0, PM_REMOVE))
{
TranslateMessage(&msg);
DispatchMessage(&msg);
}
glClear(GL_COLOR_BUFFER_BIT);
}
wglMakeCurrent(NULL, NULL);
ReleaseDC(hWnd, hDC);
wglDeleteContext(hRC);
DestroyWindow(hWnd);
return 0;
}
Here's minimal code, using Win32, that reproduces it.
#ifndef UNICODE
#define UNICODE
#define _UNICODE
#endif
#include <windows.h> /* must include this before GL/gl.h */
#include "glad/glad.h"
#include "glad/glad_wgl.h"
#include "wglext.h"
#include <string>
static bool CloseFlag = false;
void display(HDC hDC)
{
/* rotate a triangle around */
glClear(GL_COLOR_BUFFER_BIT);
glBegin(GL_TRIANGLES);
glColor3f(1.0f, 0.0f, 0.0f);
glVertex2i(0, 1);
glColor3f(0.0f, 1.0f, 0.0f);
glVertex2i(-1, -1);
glColor3f(0.0f, 0.0f, 1.0f);
glVertex2i(1, -1);
glEnd();
glFlush();
SwapBuffers(hDC);
}
void JustClear()
{
glClear(GL_COLOR_BUFFER_BIT);
}
LONG WINAPI WindowProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
static PAINTSTRUCT ps;
switch(uMsg)
{
case WM_SIZE:
glViewport(0, 0, LOWORD(lParam), HIWORD(lParam));
PostMessage(hWnd, WM_PAINT, 0, 0);
return 0;
case WM_CHAR:
switch (wParam)
{
case 27: /* ESC key */
//PostQuitMessage(0);
CloseFlag = true;
break;
}
return 0;
case WM_CLOSE:
CloseFlag = true;
//PostQuitMessage(0);
return 0;
}
return DefWindowProc(hWnd, uMsg, wParam, lParam);
}
HWND CreateOpenGLWindow(char* title, int x, int y, int width, int height,
BYTE type, DWORD flags)
{
int pf;
HDC hDC;
HWND hWnd;
WNDCLASS wc;
PIXELFORMATDESCRIPTOR pfd;
static HINSTANCE hInstance = 0;
/* only register the window class once - use hInstance as a flag. */
if (!hInstance) {
hInstance = GetModuleHandle(NULL);
wc.style = CS_OWNDC;
wc.lpfnWndProc = (WNDPROC)WindowProc;
wc.cbClsExtra = 0;
wc.cbWndExtra = 0;
wc.hInstance = hInstance;
wc.hIcon = LoadIcon(NULL, IDI_WINLOGO);
wc.hCursor = LoadCursor(NULL, IDC_ARROW);
wc.hbrBackground = NULL;
wc.lpszMenuName = NULL;
wc.lpszClassName = L"OpenGL";
if (!RegisterClass(&wc)) {
return NULL;
}
}
std::wstring className = L"OpenGL";
hWnd = CreateWindow(className.c_str(), className.c_str(), WS_OVERLAPPEDWINDOW |
WS_CLIPSIBLINGS | WS_CLIPCHILDREN,
x, y, width, height, NULL, NULL, hInstance, NULL);
if (hWnd == NULL) {
return NULL;
}
hDC = GetDC(hWnd);
/* there is no guarantee that the contents of the stack that become
the pfd are zeroed, therefore _make sure_ to clear these bits. */
memset(&pfd, 0, sizeof(pfd));
pfd.nSize = sizeof(pfd);
pfd.nVersion = 1;
pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER | flags;
pfd.iPixelType = type;
pfd.cColorBits = 32;
pf = ChoosePixelFormat(hDC, &pfd);
if (pf == 0) {
return 0;
}
if (SetPixelFormat(hDC, pf, &pfd) == FALSE) {
return 0;
}
DescribePixelFormat(hDC, pf, sizeof(PIXELFORMATDESCRIPTOR), &pfd);
ReleaseDC(hWnd, hDC);
return hWnd;
}
int main ()
{
HDC hDC; /* device context */
HGLRC hRC; /* opengl context */
HWND hWnd; /* window */
MSG msg; /* message */
hWnd = CreateOpenGLWindow("minimal", 0, 0, 256, 256, PFD_TYPE_RGBA, 0);
if (hWnd == NULL)
exit(1);
hDC = GetDC(hWnd);
hRC = wglCreateContext(hDC);
wglMakeCurrent(hDC, hRC);
if(!gladLoadWGL(hDC))
{
return -1;
}
if (!gladLoadGL()) //Load Glad
{
return -1;
}
ShowWindow(hWnd, SW_SHOW);
while(!CloseFlag)
{
//Handles input
while(PeekMessage(&msg, nullptr, 0, 0, PM_REMOVE))
{
TranslateMessage(&msg);
DispatchMessage(&msg);
}
//Render
//display(hDC);
JustClear();
}
wglMakeCurrent(NULL, NULL);
ReleaseDC(hWnd, hDC);
wglDeleteContext(hRC);
DestroyWindow(hWnd);
return 0;
}
wglext.h is from here
Here's the OpenGL glad config
/*
OpenGL loader generated by glad 0.1.36 on Fri Oct 28 11:12:34 2022.
Language/Generator: C/C++
Specification: gl
APIs: gl=3.3
Profile: compatibility
Extensions:
Loader: True
Local files: False
Omit khrplatform: False
Reproducible: False
Commandline:
--profile="compatibility" --api="gl=3.3" --generator="c" --spec="gl" --extensions=""
Online:
https://glad.dav1d.de/#profile=compatibility&language=c&specification=gl&loader=on&api=gl%3D3.3
*/
and Glad wgl 1.0 with all the extensions
and here's the CMake I used
cmake_minimum_required(VERSION 3.14)
set (CMAKE_CXX_STANDARD 11)
# For Clang to do parsing
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
# set the project name
project(Example)
add_executable(EXAMPLE_EXE ${CMAKE_CURRENT_LIST_DIR}/Src/main.cpp)
add_subdirectory("${CMAKE_CURRENT_LIST_DIR}/External/glad_v0.1.36")
find_package(OpenGL REQUIRED)
target_include_directories(EXAMPLE_EXE PUBLIC "${CMAKE_CURRENT_LIST_DIR}/External/wglExt")
target_link_libraries(EXAMPLE_EXE PUBLIC OpenGL::GL glad)

OpenGL context creation in Windows

Here's my code. I don't want to use a library for device context/RC creation.
Nothing draws at all. Maybe I set the DC or RC up wrong?
I'm trying to make a game engine from scratch.
#pragma once
#include <cstdlib>
#include <windows.h>
#include <GL/GL.h>
#pragma comment (lib, "opengl32.lib")
#include <iostream>
using namespace std;
class shadoeGE
{
public:
HWND s_Window = NULL;
int __cdecl Init(const char*, int, int);
private:
};
LRESULT __stdcall s_WindowEvent(HWND hWnd, unsigned int message, unsigned int wParam, long lParam)
{
switch (message)
{
default:
return DefWindowProc(hWnd, message, wParam, lParam);
};
};
int shadoeGE::Init(const char* title, int x, int y)
{
MSG msg = { 0 };
WNDCLASS wc = { 0 };
wc.lpfnWndProc = s_WindowEvent;
wc.hInstance = GetModuleHandle(NULL);
wc.hbrBackground = (HBRUSH)(COLOR_BACKGROUND);
wc.lpszClassName = "sWindowClass";
wc.style = CS_OWNDC;
if (!RegisterClass(&wc))
{
return 0;
};
this->s_Window = CreateWindowA(wc.lpszClassName, title, WS_OVERLAPPEDWINDOW | WS_VISIBLE, 0, 0, x, y, 0, 0, GetModuleHandle(NULL), 0);
while (GetMessage(&msg, NULL, 0, 0) > 0)
{
DispatchMessage(&msg);
};
PIXELFORMATDESCRIPTOR pfd =
{
sizeof(PIXELFORMATDESCRIPTOR),
1,
PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
PFD_TYPE_RGBA,
32,
0, 0, 0, 0, 0, 0,
0,
0,
0,
0, 0, 0, 0,
24,
8,
0,
PFD_MAIN_PLANE,
0,
0, 0, 0
};
HDC deviceContextHandle = GetDC(this->s_Window);
int windowsChosen = ChoosePixelFormat(deviceContextHandle, &pfd);
SetPixelFormat(deviceContextHandle, windowsChosen, &pfd);
HGLRC ourOpenGLRenderingContext = wglCreateContext(deviceContextHandle);
wglMakeCurrent(deviceContextHandle, ourOpenGLRenderingContext);
glViewport(0, 0, 640, 480);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBegin(GL_TRIANGLES);
glColor3f(0.1, 0.2, 0.3);
glVertex3f(0, 0, 0);
glVertex3f(1, 0, 0);
glVertex3f(0, 1, 0);
glEnd();
SwapBuffers(deviceContextHandle);
return 1;
};
You have to implement a message loop:
MSG msg;
while( ::GetMessage( &msg, 0, 0, 0 ) )
::DispatchMessage( &msg );
You have to draw the scene in the message loop, when the WM_PAINT message is sent:
LRESULT CALLBACK shadoeGE::s_WindowEvent( HWND hWnd, unsigned int msg, WPARAM wparam, LPARAM lparam )
{
switch(msg)
{
// [...]
case WM_PAINT:
wnd.Display();
break;
}
return DefWindowProc( hWnd, msg, wparam, lparam );
}
void shadoeGE::Display( void )
{
RECT clientRect;
::GetClientRect( hOGLWnd, &clientRect );
glViewport( 0, 0, clientRect.right-clientRect.left, clientRect.bottom-clientRect.top );
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
// [...]
HDC hDC = ::GetDC( hOGLWnd );
::SwapBuffers( hDC );
::ReleaseDC( hOGLWnd, hDC );
}
See the example:
#include <windows.h>
#include <GL/gl.h>
#include <string>
#include <vector>
#include <stdexcept>
class shadoeGE
{
public:
static LRESULT CALLBACK s_WindowEvent( HWND hWnd, unsigned int msg, WPARAM wparam, LPARAM lparam );
HWND Init( int width, int height );
void DestroyWindow( void );
void MessageLoop( void );
void Display( void );
private:
HWND hOGLWnd = NULL;
HGLRC hOGLRenderContext = NULL;
};
std::wstring wnd_class( L"my_wnd_class" );
shadoeGE wnd;
int main()
{
int w = 800;
int h = 600;
HWND hWnd = wnd.Init( w, h );
if ( hWnd == 0 )
throw std::runtime_error( "error initializing window" );
wnd.MessageLoop();
wnd.DestroyWindow();
return 0;
}
HWND shadoeGE::Init( int width, int height )
{
// Get module handle
HMODULE hModule = ::GetModuleHandle( 0 );
if (!hModule)
return NULL;
// Create window class
WNDCLASSEX wndClassData;
memset( &wndClassData, 0, sizeof( WNDCLASSEX ) );
wndClassData.cbSize = sizeof( WNDCLASSEX );
wndClassData.style = CS_DBLCLKS;
wndClassData.lpfnWndProc = s_WindowEvent;
wndClassData.cbClsExtra = 0;
wndClassData.cbWndExtra = 0;
wndClassData.hInstance = hModule;
wndClassData.hIcon = ::LoadIcon(0,IDI_APPLICATION);
wndClassData.hCursor = ::LoadCursor(0,IDC_ARROW);
wndClassData.hbrBackground = ::CreateSolidBrush(COLOR_WINDOW+1);
wndClassData.lpszMenuName = 0;
wndClassData.lpszClassName = wnd_class.c_str();
wndClassData.hIconSm = 0;
if ( !::RegisterClassEx( &wndClassData ) )
return false;
// Create Window
hOGLWnd = ::CreateWindow( wnd_class.c_str(), NULL, WS_OVERLAPPEDWINDOW, 0, 0, width, height, NULL, NULL, hModule, NULL);
if ( hOGLWnd == NULL )
return NULL;
// Get device context
HDC hDC = ::GetDC( hOGLWnd );
// Create OpenGL context
DWORD pixelFormatFlags = PFD_SUPPORT_OPENGL | PFD_SUPPORT_COMPOSITION | PFD_GENERIC_ACCELERATED | PFD_DRAW_TO_WINDOW | PFD_DOUBLEBUFFER;
PIXELFORMATDESCRIPTOR pfd =
{
sizeof(PIXELFORMATDESCRIPTOR),
1,
pixelFormatFlags, //Flags
PFD_TYPE_RGBA, //The kind of framebuffer. RGBA or palette.
32, //Colordepth of the framebuffer.
0, 0, 0, 0, 0, 0,
0,
0,
0,
0, 0, 0, 0,
24, //Number of bits for the depthbuffer
8, //Number of bits for the stencilbuffer
0, //Number of Aux buffers in the framebuffer.
PFD_MAIN_PLANE,
0,
0, 0, 0
};
int pixelFormat = ::ChoosePixelFormat( hDC, &pfd );
::SetPixelFormat( hDC, pixelFormat, &pfd );
hOGLRenderContext = ::wglCreateContext( hDC );
// make OpenGL context the current context
::wglMakeCurrent( hDC, hOGLRenderContext );
// release device context
::ReleaseDC( hOGLWnd, hDC );
// show the window
::ShowWindow( hOGLWnd, SW_SHOWDEFAULT );
return hOGLWnd;
}
void shadoeGE::MessageLoop( void )
{
MSG msg;
while( ::GetMessage( &msg, 0, 0, 0 ) )
::DispatchMessage( &msg );
}
void shadoeGE::DestroyWindow(void)
{
::DestroyWindow( hOGLWnd );
::wglMakeCurrent( NULL, NULL );
::wglDeleteContext( hOGLRenderContext );
HMODULE hModule = ::GetModuleHandle( 0 );
if (!hModule)
return;
::UnregisterClass( wnd_class.c_str(), hModule );
}
LRESULT CALLBACK shadoeGE::s_WindowEvent( HWND hWnd, unsigned int msg, WPARAM wparam, LPARAM lparam )
{
switch(msg)
{
case WM_DESTROY:
PostQuitMessage(0);
break;
case WM_PAINT:
wnd.Display();
break;
}
return DefWindowProc( hWnd, msg, wparam, lparam );
}
void shadoeGE::Display( void )
{
RECT clientRect;
::GetClientRect( hOGLWnd, &clientRect );
glViewport( 0, 0, clientRect.right-clientRect.left, clientRect.bottom-clientRect.top );
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glBegin(GL_TRIANGLES);
glColor3f(0.1, 0.2, 0.3);
glVertex3f(0, 0, 0);
glVertex3f(1, 0, 0);
glVertex3f(0, 1, 0);
glEnd();
HDC hDC = ::GetDC( hOGLWnd );
::SwapBuffers( hDC );
::ReleaseDC( hOGLWnd, hDC );
}
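A note on repainting: with this design the scene is only redrawn when Windows generates a WM_PAINT (first show, resize, uncovering the window). To force a redraw from your own code, for example for animation, you can invalidate the client area so that another WM_PAINT is delivered. A minimal sketch (the call site is hypothetical and not part of the example above):

// Mark the OpenGL window's client area dirty; Windows will follow up with a WM_PAINT,
// which ends up in s_WindowEvent and calls wnd.Display().
// FALSE = do not erase the background before painting.
::InvalidateRect( hOGLWnd, NULL, FALSE );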

Why can't I check the OpenGL version in this fake OpenGL context?

I'm trying to check the OpenGL version by creating a fake OpenGL context, but I always get version 0. I create the fake context with this code:
PIXELFORMATDESCRIPTOR pfd;
ZeroMemory( &pfd, sizeof( pfd ) );
pfd.nSize = sizeof( pfd );
pfd.nVersion = 1;
pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL |
PFD_DOUBLEBUFFER;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = 24;
pfd.cDepthBits = 16;
pfd.iLayerType = PFD_MAIN_PLANE;
WINDOWINFO info;
HWND mHWND;
info.cbSize = sizeof(WINDOWINFO);
GetWindowInfo(mHWND,&info);
HDC mDC;
mDC = GetDC(mHWND);
int fmt = ChoosePixelFormat( mDC, &pfd );
SetPixelFormat( mDC, fmt, &pfd );
HGLRC mOGLCtx;
mOGLCtx = wglCreateContext( mDC );
wglMakeCurrent(mDC, mOGLCtx);
And I try to check the version with this:
glGetString(GL_VERSION)
Try to get (GetActiveWindow, GetForegroundWindow) or create (CreateWindowEx) a window handle before getting its DC, or try GetDC(0).
You can create a minimal OpenGL context like this:
HWND wnd = CreateWindow(
"STATIC",
"GL",
WS_OVERLAPPEDWINDOW | WS_CLIPSIBLINGS | WS_CLIPCHILDREN,
0, 0, 16, 16,
NULL, NULL,
NULL, NULL );
HDC dc = GetDC( wnd );
PIXELFORMATDESCRIPTOR pfd = {
sizeof(PIXELFORMATDESCRIPTOR), 1,
PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL,
PFD_TYPE_RGBA, 32,
0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0,
16, 0,
0, PFD_MAIN_PLANE, 0, 0, 0, 0
};
int fmt = ChoosePixelFormat( dc, &pfd );
SetPixelFormat( dc, fmt, &pfd );
HGLRC rc = wglCreateContext( dc );
wglMakeCurrent( dc, rc );
Check the OpenGL version in the same manner:
glGetString(GL_VERSION)
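A minimal sketch of checking that result (the printf assumes <cstdio> is included; this is not part of the original answer):

const GLubyte* version = glGetString( GL_VERSION );
if ( version == NULL )
{
    // No current OpenGL context: glGetString typically returns NULL here, which
    // is why the question's code reports "version 0". Its mHWND is never assigned
    // a real window handle, so GetDC/wglCreateContext/wglMakeCurrent all fail.
}
else
{
    printf( "OpenGL version: %s\n", (const char*)version );
}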
Finally, clean everything up:
// Clean GL context
wglDeleteContext( rc );
ReleaseDC( wnd, dc );
DestroyWindow( wnd );

Texture fails to render - DirectX 9

I have this code in DirectX 9, in which I need to get the DC from a COM interface and draw it. I do get the DC and it contains the image, but I get a black screen. Any ideas why?
LPDIRECT3DSURFACE9 pRenderSurface = NULL, pRenderSurfaceTMP = NULL;
m_pRenderTexture->GetSurfaceLevel(0, &pRenderSurface);
if (pRenderSurface == NULL)
return FALSE;
m_pD3DDevice->CreateOffscreenPlainSurface(m_nWidth, m_nHeight,
D3DFMT_X8R8G8B8, D3DPOOL_SYSTEMMEM, &pRenderSurfaceTMP, 0);
m_pD3DDevice->GetRenderTargetData(pRenderSurface,pRenderSurfaceTMP);
HDC hDC = NULL;
hr = pRenderSurfaceTMP->GetDC(&hDC);
if (FAILED(hr))
return FALSE;
if (m_pViewObject != NULL)
{
// RECT is relative to the windowless container rect
RECTL rcRect = {0, 0, m_nWidth, m_nHeight};
// Draw onto the DC!
hr = m_pViewObject->Draw(DVASPECT_CONTENT, 1,
NULL, NULL, NULL, hDC, &rcRect, NULL, NULL,
0);
}
pRenderSurface->ReleaseDC(hDC);
pRenderSurface->Release();
// Draw the surface
m_pD3DDevice->SetStreamSource( 0, m_pVertexBuffer, 0, sizeof(Vertex) );
m_pD3DDevice->SetTexture( 0, m_pRenderTexture );
hr = m_pD3DDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
m_hbrBackground = NULL;
pRenderSurfaceTMP->Release();
Thanks in advance
Make sure that m_pRenderTexture is created with D3DUSAGE_RENDERTARGET, and that the size and format of pRenderSurface and pRenderSurfaceTMP match. For the conditions under which GetRenderTargetData can fail, see: http://msdn.microsoft.com/en-us/library/windows/desktop/bb174405(v=vs.85).aspx
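For reference, a minimal sketch of what that creation call could look like (D3DUSAGE_RENDERTARGET textures have to live in D3DPOOL_DEFAULT; the surrounding error handling is assumed):

// Create m_pRenderTexture as a real render target, so that
// GetRenderTargetData(pRenderSurface, pRenderSurfaceTMP) has a render target to copy from.
if (FAILED(m_pD3DDevice->CreateTexture(m_nWidth, m_nHeight, 1,
        D3DUSAGE_RENDERTARGET, D3DFMT_X8R8G8B8,
        D3DPOOL_DEFAULT, &m_pRenderTexture, NULL)))
    return FALSE;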
Try this:
m_pD3DDevice->CreateTexture(m_nWidth, m_nHeight, 1, 0, D3DFMT_X8R8G8B8,
D3DPOOL_MANAGED, &m_pRenderTexture, NULL);
// ...
LPDIRECT3DSURFACE9 pRenderSurface = NULL;
if (FAILED(m_pRenderTexture->GetSurfaceLevel(0, &pRenderSurface))) return FALSE;
HDC hDC = NULL;
if (FAILED(pRenderSurface->GetDC(&hDC))) return FALSE;
if (m_pViewObject != NULL)
{
RECTL rcRect = {0, 0, m_nWidth, m_nHeight};
m_pViewObject->Draw(DVASPECT_CONTENT, 1,
NULL, NULL, NULL, hDC, &rcRect, NULL, NULL, 0);
}
pRenderSurface->ReleaseDC(hDC);
pRenderSurface->Release();
m_pD3DDevice->SetStreamSource( 0, m_pVertexBuffer, 0, sizeof(Vertex) );
m_pD3DDevice->SetTexture( 0, m_pRenderTexture );
m_pD3DDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
or this:
m_pD3DDevice->CreateTexture(m_nWidth, m_nHeight, 1, 0, D3DFMT_X8R8G8B8,
D3DPOOL_DEFAULT, &m_pRenderTexture, NULL);
// ...
LPDIRECT3DSURFACE9 pRenderSurfaceTMP = NULL;
m_pD3DDevice->CreateOffscreenPlainSurface(m_nWidth, m_nHeight,
D3DFMT_X8R8G8B8, D3DPOOL_SYSTEMMEM, &pRenderSurfaceTMP, 0);
HDC hDC = NULL;
if (FAILED(pRenderSurfaceTMP->GetDC(&hDC))) return FALSE;
if (m_pViewObject != NULL)
{
RECTL rcRect = {0, 0, m_nWidth, m_nHeight};
m_pViewObject->Draw(DVASPECT_CONTENT, 1,
NULL, NULL, NULL, hDC, &rcRect, NULL, NULL, 0);
}
pRenderSurfaceTMP->ReleaseDC(hDC);
LPDIRECT3DSURFACE9 pRenderSurface = NULL;
if (FAILED(m_pRenderTexture->GetSurfaceLevel(0, &pRenderSurface)))
return FALSE;
if (FAILED(m_pD3DDevice->UpdateSurface(pRenderSurfaceTMP, NULL,
pRenderSurface, NULL))) return FALSE;
pRenderSurface->Release();
m_pD3DDevice->SetStreamSource( 0, m_pVertexBuffer, 0, sizeof(Vertex) );
m_pD3DDevice->SetTexture( 0, m_pRenderTexture );
m_pD3DDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);

Simple DirectX 11 program runtime error

I'm new to DirectX 11, and I have worked my way up to the Drawing Triangle chapter in my book.
I have written this code, but I get a runtime error:
"Unhandled exception at 0x00cd14e6 in DirectX 11.exe: 0xc0000005: Access violation reading location 0x00000000."
The debugger shows that the error is in the InitPipeline function, but I can't find the problem there.
What have I done wrong?
Code:
#include <windows.h>
#include <windowsx.h>
#include <d3d11.h>
#include <D3DX11.h>
#include <D3DX10.h>
#pragma comment(lib,"d3d11.lib")
#pragma comment(lib,"d3dx11.lib")
#pragma comment(lib,"d3dx10.lib")
#define SCREEN_WIDTH 800
#define SCREEN_HEIGHT 600
IDXGISwapChain *swapchain;
ID3D11Device *dev;
ID3D11DeviceContext *devcon;
ID3D11InputLayout *pInputLayout;
ID3D11Buffer *pVBuffer;
ID3D11RenderTargetView *backbuffer;
ID3D11VertexShader *pVS;
ID3D11PixelShader *pPS;
// Type
struct VERTEX
{
FLOAT X,Y,Z;
D3DXCOLOR Color;
};
//DirectX
void InitPipeline()
{
// load and compile the two shaders
ID3D10Blob *VS, *PS;
D3DX11CompileFromFile("shaders.hlsl", 0, 0, "VShader", "vs_5_0", 0, 0, 0, &VS, 0, 0);
D3DX11CompileFromFile("shaders.hlsl", 0, 0, "PShader", "ps_5_0", 0, 0, 0, &PS, 0, 0);
//debug
if(!dev)
{
MessageBox(NULL, "DEV = NULL", "ERROR", NULL);
}
if(!devcon)
{
MessageBox(NULL, "DEVCON = NULL", "ERROR", NULL);
}
if(!VS)
{
MessageBox(NULL, "VS = NULL", "ERROR", NULL);
}
if(!PS)
{
MessageBox(NULL, "PS = NULL", "ERROR", NULL);
}
// encapsulate both shaders into shader objects
dev->CreateVertexShader(VS->GetBufferPointer(), VS->GetBufferSize(), NULL, &pVS);
dev->CreatePixelShader(PS->GetBufferPointer(), PS->GetBufferSize(), NULL, &pPS);
// set the shader objects
devcon->VSSetShader(pVS, 0, 0);
devcon->PSSetShader(pPS, 0, 0);
D3D11_INPUT_ELEMENT_DESC ied[] =
{
{"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0,0,D3D11_INPUT_PER_VERTEX_DATA,0},
{"COLOR",0,DXGI_FORMAT_R32G32B32A32_FLOAT,0,D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0},
};
dev->CreateInputLayout(ied, 2, VS->GetBufferPointer(), VS->GetBufferSize(), &pInputLayout);
devcon->IASetInputLayout(pInputLayout);
}
void InitGraphics()
{
VERTEX OurVertices[] =
{
{1,0,0,D3DXCOLOR(1,0,0,1)},
{0,-1,0,D3DXCOLOR(1,0,0,1)},
{0,0,1,D3DXCOLOR(1,0,0,1)},
};
D3D11_BUFFER_DESC bd;
ZeroMemory(&bd, sizeof(D3D11_BUFFER_DESC));
bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bd.ByteWidth = sizeof(VERTEX)*3;
bd.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
bd.Usage = D3D11_USAGE_DYNAMIC;
dev->CreateBuffer(&bd, NULL, &pVBuffer);
D3D11_MAPPED_SUBRESOURCE ms;
devcon->Map(pVBuffer, NULL, D3D11_MAP_WRITE_DISCARD, NULL, &ms);
memcpy(ms.pData, OurVertices, sizeof(OurVertices));
devcon->Unmap(pVBuffer, NULL);
}
void RenderFrame()
{
devcon->ClearRenderTargetView(backbuffer, D3DXCOLOR(0,0,1,1));
UINT stride = sizeof(VERTEX);
UINT offset = 0;
devcon->IASetVertexBuffers(0, 1, &pVBuffer, &stride, &offset);
devcon->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
devcon->Draw(3,0);
swapchain->Present(0,0);
}
void InitD3D(HWND hWnd)
{
// create a struct to hold information about the swap chain
DXGI_SWAP_CHAIN_DESC scd;
// clear out the struct for use
ZeroMemory(&scd, sizeof(DXGI_SWAP_CHAIN_DESC));
// fill the swap chain description struct
scd.BufferCount = 1; // one back buffer
scd.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; // use 32-bit color
scd.BufferDesc.Width = SCREEN_WIDTH; // set the back buffer width
scd.BufferDesc.Height = SCREEN_HEIGHT; // set the back buffer height
scd.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT; // how swap chain is to be used
scd.OutputWindow = hWnd; // the window to be used
scd.SampleDesc.Count = 4; // how many multisamples
scd.Windowed = TRUE; // windowed/full-screen mode
scd.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH; // allow full-screen switching
// create a device, device context and swap chain using the information in the scd struct
if(FAILED(D3D11CreateDeviceAndSwapChain(NULL,
D3D_DRIVER_TYPE_HARDWARE,
NULL,
NULL,
NULL,
NULL,
D3D11_SDK_VERSION,
&scd,
&swapchain,
&dev,
NULL,
&devcon)))
{
MessageBox(NULL, "D3D11CreateDeviceAndSwapChain Failed", "ERROR", NULL);
}
// get the address of the back buffer
ID3D11Texture2D *pBackBuffer;
swapchain->GetBuffer(0, __uuidof(ID3D11Texture2D), (LPVOID*)&pBackBuffer);
// use the back buffer address to create the render target
dev->CreateRenderTargetView(pBackBuffer, NULL, &backbuffer);
pBackBuffer->Release();
// set the render target as the back buffer
devcon->OMSetRenderTargets(1, &backbuffer, NULL);
// Set the viewport
D3D11_VIEWPORT viewport;
ZeroMemory(&viewport, sizeof(D3D11_VIEWPORT));
viewport.TopLeftX = 0;
viewport.TopLeftY = 0;
viewport.Width = SCREEN_WIDTH;
viewport.Height = SCREEN_HEIGHT;
devcon->RSSetViewports(1, &viewport);
InitPipeline();
InitGraphics();
}
void CleanD3D()
{
swapchain->SetFullscreenState(FALSE, NULL);
dev->Release();
devcon->Release();
swapchain->Release();
pInputLayout->Release();
pVBuffer->Release();
backbuffer->Release();
pVS->Release();
pPS->Release();
}
// Window
LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
{
switch(message)
{
case WM_DESTROY:
PostQuitMessage(0);
return 0;
break;
default:
return DefWindowProc(hWnd, message, wParam, lParam);
}
}
INT WINAPI WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR lpCmdLine, INT nCmdShow)
{
HWND hWnd;
WNDCLASSEX wc;
ZeroMemory(&wc, sizeof(WNDCLASSEX));
wc.cbSize = sizeof(WNDCLASSEX);
//wc.hbrBackground = (HBRUSH)GetStockObject(GRAY_BRUSH);
wc.hCursor = LoadCursor(NULL, IDC_ARROW);
wc.hIcon = (HICON)LoadIcon(NULL, IDI_APPLICATION);
wc.hInstance = hInstance;
wc.lpfnWndProc = WndProc;
wc.lpszClassName = "DirectXWindow";
wc.style = CS_HREDRAW | CS_VREDRAW;
RegisterClassEx(&wc);
hWnd = CreateWindowEx(NULL,
"DirectXWindow",
"DirectX 11 (June 2010",
WS_OVERLAPPEDWINDOW,
0,0,
SCREEN_WIDTH, SCREEN_HEIGHT,
NULL,
NULL,
hInstance,
NULL);
ShowWindow(hWnd, nCmdShow);
InitD3D(hWnd);
UpdateWindow(hWnd);
MSG msg = {0};
while(msg.message != WM_QUIT)
{
if(PeekMessage(&msg, NULL, 0, 0, PM_REMOVE))
{
TranslateMessage(&msg);
DispatchMessage(&msg);
}
else
{
RenderFrame();
}
}
CleanD3D();
return msg.wParam;
}
That error basically means that you are using a null pointer somewhere. Check all your pointers. My guess is that some function somewhere is failing to set your pointers (or you have the parameters in the wrong order).
You are using a NULL pointer. You define 8 global D3D interface pointers, and you use dev and devcon inside InitPipeline; they need to be allocated and initialized before you can call any methods on them.
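A minimal sketch of checking the shader compile before the blobs are dereferenced (the message-box text is made up; the file name and entry point are the ones from the question):

// Fail loudly if shaders.hlsl is missing or does not compile, instead of
// calling GetBufferPointer() on a null blob.
ID3D10Blob *VS = NULL, *errors = NULL;
HRESULT hr = D3DX11CompileFromFile("shaders.hlsl", 0, 0, "VShader", "vs_5_0",
                                   0, 0, 0, &VS, &errors, 0);
if (FAILED(hr) || VS == NULL)
{
    if (errors) // compiler output, present when the file was found but had errors
        MessageBoxA(NULL, (char*)errors->GetBufferPointer(), "VShader failed", MB_OK);
    else
        MessageBoxA(NULL, "shaders.hlsl not found or compile failed", "VShader failed", MB_OK);
    return;
}
// ...do the same for "PShader"/PS before calling CreatePixelShader...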
Try setting scd.SampleDesc.Count = 1.
This value tells Direct3D how much detail to put into anti-aliasing; the higher the number, the better. Direct3D 11 video cards are guaranteed to support up to 4 here, but the minimum is 1.
Perhaps your video card doesn't support Direct3D 11?
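If 4x MSAA is wanted, one option is a support check, sketched here under the assumption that the device is created before the swap chain (for example with D3D11CreateDevice), which is not how the code above is structured:

// Ask the device how many quality levels it supports for 4 samples of the
// back-buffer format; fall back to no MSAA if the answer is zero.
UINT quality = 0;
dev->CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM, 4, &quality);
scd.SampleDesc.Count   = (quality > 0) ? 4 : 1;
scd.SampleDesc.Quality = 0;   // quality level 0 is valid for any supported count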