Creating a 1 pixel texture issue - C++

I am trying to make a 1x1 texture; the color is a variable passed to the function, and I have the following code:
unsigned char texArray[4];
texArray[0] = (unsigned char) color.x;
texArray[1] = (unsigned char) color.y;
texArray[2] = (unsigned char) color.z;
texArray[3] = (unsigned char) color.w;
ID3D11Texture2D *pTexture = nullptr;
ID3D11ShaderResourceView* pShaderResourceView;
D3D11_TEXTURE2D_DESC texDesc;
ZeroMemory(&texDesc, sizeof(D3D11_TEXTURE2D_DESC));
texDesc.ArraySize = 1;
texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
texDesc.CPUAccessFlags = 0;
texDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
texDesc.MipLevels = 1;
texDesc.MiscFlags = 0;
texDesc.SampleDesc.Count = 1;
texDesc.SampleDesc.Quality = 0;
texDesc.Usage = D3D11_USAGE_DEFAULT;
texDesc.Height = 1;
texDesc.Width = 1;
D3D11_SUBRESOURCE_DATA texInitData;
ZeroMemory(&texInitData, sizeof(D3D11_SUBRESOURCE_DATA));
texInitData.pSysMem = texArray;
HRESULT hr;
hr = m_pDevice->CreateTexture2D(&texDesc, &texInitData, &pTexture);
hr = m_pDevice->CreateShaderResourceView(pTexture, NULL, &pShaderResourceView);
But CreateTexture2D fails (pTexture stays nullptr) and hr contains E_INVALIDARG ("The parameter is incorrect").
What is wrong/missing?

Since you are creating a 2D texture, you need to specify the SysMemPitch member of texInitData, even though the texture is only 1x1 pixels. In this case set it to sizeof(unsigned char) * 4, because that is how many bytes into the data the next row would begin if there were another row.
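For example, a minimal fix on top of the snippet above (identifiers as in the question) would be:
// One 32-bit RGBA pixel: 4 bytes per row, and only one row.
texInitData.pSysMem = texArray;
texInitData.SysMemPitch = sizeof(unsigned char) * 4; // distance in bytes from one row to the next
texInitData.SysMemSlicePitch = 0;                    // only meaningful for 3D textures
hr = m_pDevice->CreateTexture2D(&texDesc, &texInitData, &pTexture);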

It's good practice to set the debug flag when creating the D3D device; that way you get more information from Direct3D in Visual Studio's output window when running in debug mode.
UINT flags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
#if defined( DEBUG ) || defined( _DEBUG )
flags |= D3D11_CREATE_DEVICE_DEBUG;
#endif
HRESULT hr;
if (FAILED (hr = D3D11CreateDeviceAndSwapChain( NULL,
D3D_DRIVER_TYPE_HARDWARE,
NULL,
flags,
&FeatureLevelsRequested,
numLevelsRequested,
D3D11_SDK_VERSION,
&sd,
&g_pSwapChain,
&g_pd3dDevice,
&FeatureLevelsSupported,
&g_pImmediateContext )))
{
return hr;
}
Here is what I got from running your code; the message makes it easy to see what's wrong:
D3D11 ERROR: ID3D11Device::CreateTexture2D: pInitialData[0].SysMemPitch cannot be 0 [ STATE_CREATION ERROR #100: CREATETEXTURE2D_INVALIDINITIALDATA]
First-chance exception at 0x74891EE9 in Teapot.exe: Microsoft C++ exception: _com_error at memory location 0x00E4F668.
First-chance exception at 0x74891EE9 in Teapot.exe: Microsoft C++ exception: _com_error at memory location 0x00E4F668.
D3D11 ERROR: ID3D11Device::CreateTexture2D: Returning E_INVALIDARG, meaning invalid parameters were passed. [ STATE_CREATION ERROR #104: CREATETEXTURE2D_INVALIDARG_RETURN]

Related

DirectCompute CreateBuffer fails with error 0x80070057 (E_INVALIDARG)

I'm trying to create a buffer in GPU memory to upload data from the CPU. GPU access will be read-only; the data will be used as an input buffer for a compute shader.
CreateBuffer() fails with error 0x80070057 (E_INVALIDARG). I read the docs, and read them again, without discovering which argument causes the failure.
Here is an extract from my code where I marked the failure:
HRESULT hr = S_OK;
RECT rc;
GetClientRect( g_hWnd, &rc );
UINT width = rc.right - rc.left;
UINT height = rc.bottom - rc.top;
UINT createDeviceFlags = 0;
#ifdef _DEBUG
createDeviceFlags |= D3D11_CREATE_DEVICE_DEBUG;
#endif
D3D_DRIVER_TYPE driverTypes[] =
{
#ifdef WARP
D3D_DRIVER_TYPE_REFERENCE,
#else
D3D_DRIVER_TYPE_HARDWARE,
#endif
};
UINT numDriverTypes = sizeof( driverTypes ) / sizeof( driverTypes[0] );
DXGI_SWAP_CHAIN_DESC sd;
ZeroMemory( &sd, sizeof( sd ) );
sd.BufferCount = 1;
sd.BufferDesc.Width = width;
sd.BufferDesc.Height = height;
sd.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
sd.BufferDesc.RefreshRate.Numerator = 60;
sd.BufferDesc.RefreshRate.Denominator = 1;
sd.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT | DXGI_USAGE_UNORDERED_ACCESS | DXGI_USAGE_SHADER_INPUT;
sd.OutputWindow = g_hWnd;
sd.SampleDesc.Count = 1;
sd.SampleDesc.Quality = 0;
sd.Windowed = TRUE;
D3D_FEATURE_LEVEL FeatureLevels = D3D_FEATURE_LEVEL_11_0;
for( UINT driverTypeIndex = 0; driverTypeIndex < numDriverTypes; driverTypeIndex++ )
{
g_driverType = driverTypes[driverTypeIndex];
hr = D3D11CreateDeviceAndSwapChain( NULL, g_driverType, NULL, createDeviceFlags, &FeatureLevels,1,
D3D11_SDK_VERSION, &sd, &g_pSwapChain, &g_pd3dDevice, NULL, &g_pImmediateContext );
if( SUCCEEDED( hr ) )
break;
}
if( FAILED( hr ) )
return hr;
// check if GPU supports doubles
D3D11_FEATURE_DATA_DOUBLES fdDoubleSupport;
g_pd3dDevice->CheckFeatureSupport( D3D11_FEATURE_DOUBLES, &fdDoubleSupport, sizeof(fdDoubleSupport) );
GPUcanDoDoubles = fdDoubleSupport.DoublePrecisionFloatShaderOps;
D3D11_BUFFER_DESC desc;
BYTE Data[200];
D3D11_SUBRESOURCE_DATA InitData;
desc.BindFlags = D3D11_BIND_UNORDERED_ACCESS | D3D11_BIND_SHADER_RESOURCE;
desc.Usage = D3D11_USAGE_DYNAMIC;
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
desc.ByteWidth = 200;
desc.MiscFlags = D3D11_RESOURCE_MISC_BUFFER_STRUCTURED;
desc.StructureByteStride = 2;
InitData.pSysMem = Data;
hr = g_pd3dDevice->CreateBuffer(&desc, &InitData, &g_pcbFractal); // <== E_INVALIDARG here
// Create constant buffer
D3D11_BUFFER_DESC Desc;
Desc.Usage = D3D11_USAGE_DYNAMIC;
Desc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
Desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
Desc.MiscFlags = 0;
Desc.ByteWidth = ((( (GPUcanDoDoubles) ? sizeof(MandelConstantsDoubles) : sizeof(MandelConstantsNoDoubles) ) + 15)/16)*16; // must be multiple of 16 bytes
hr = g_pd3dDevice->CreateBuffer( &Desc, NULL, &g_pcbFractal); // This one succeed
Any help appreciated.
Enabling Direct3D 11 Device Debugging and looking at the debug output window answers your question:
D3D11 ERROR: ID3D11Device::CreateBuffer: When creating a buffer with the MiscFlag D3D11_RESOURCE_MISC_BUFFER_STRUCTURED specified, the StructureByteStride must be greater than zero, no greater than 2048, and a multiple of 4. [ STATE_CREATION ERROR #2097339: CREATEBUFFER_INVALIDSTRUCTURESTRIDE]
If you fix that, you get:
D3D11 ERROR: ID3D11Device::CreateBuffer: A D3D11_USAGE_DYNAMIC Resource cannot be bound to certain parts of the graphics pipeline, but must have at least one BindFlags bit set. The BindFlags bits (0x88) have the following settings: D3D11_BIND_STREAM_OUTPUT (0), D3D11_BIND_RENDER_TARGET (0), D3D11_BIND_DEPTH_STENCIL (0), D3D11_BIND_UNORDERED_ACCESS (1). [ STATE_CREATION ERROR #64: CREATEBUFFER_INVALIDBINDFLAGS]
This is basically telling you that you can't combine D3D11_USAGE_DYNAMIC with D3D11_BIND_UNORDERED_ACCESS.
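As a sketch of one way to fix the failing call, assuming the buffer really is meant as a read-only structured input for the compute shader (identifiers taken from the question; the 4-byte stride is only an illustrative choice):
D3D11_BUFFER_DESC desc = {};
desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;              // read-only input: no UAV binding needed
desc.Usage = D3D11_USAGE_DEFAULT;                         // DYNAMIC is incompatible with UAV binding
desc.CPUAccessFlags = 0;                                  // upload via the initial data below (or UpdateSubresource)
desc.ByteWidth = 200;                                     // same size as the Data array in the question
desc.MiscFlags = D3D11_RESOURCE_MISC_BUFFER_STRUCTURED;
desc.StructureByteStride = 4;                             // must be > 0, <= 2048, and a multiple of 4
D3D11_SUBRESOURCE_DATA InitData = {};
InitData.pSysMem = Data;                                  // CPU-side array from the question
hr = g_pd3dDevice->CreateBuffer(&desc, &InitData, &g_pcbFractal);
If the shader does need unordered access to the buffer, keep D3D11_USAGE_DEFAULT and add D3D11_BIND_UNORDERED_ACCESS; per the second debug message, it is only the DYNAMIC usage that cannot be combined with a UAV bind flag.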

DXGI_FORMAT_YUY2 textures return different RowPitch under Windows 8.1 and Windows 10

My build environment is as follows:
Windows 8.1, VS2012, a desktop application built with the Windows 8.0 SDK and C++.
When I run my program on Windows 8.1, RowPitch prints 2560, but under Windows 10 the same program prints 5120.
What am I doing wrong here?
Here is the code. Thanks for all the replies.
#include <d3d11.h>
static bool init_directx11(ID3D11Device **pDevice, ID3D11DeviceContext **pDeviceContext)
{
D3D_FEATURE_LEVEL featureLevels[] = {D3D_FEATURE_LEVEL_11_0, D3D_FEATURE_LEVEL_10_1, D3D_FEATURE_LEVEL_10_0, D3D_FEATURE_LEVEL_9_1};
D3D_FEATURE_LEVEL featureLevel;
UINT createDeviceFlags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
HRESULT hr = D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, createDeviceFlags, featureLevels, ARRAYSIZE(featureLevels), D3D11_SDK_VERSION, pDevice,
&featureLevel, pDeviceContext);
return SUCCEEDED(hr);
}
int _tmain(int argc, _TCHAR* argv[])
{
ID3D11Device *pDevice = nullptr;
ID3D11DeviceContext *pDeviceContext= nullptr;
if (!init_directx11(&pDevice, &pDeviceContext))
{
return FALSE;
}
D3D11_TEXTURE2D_DESC desc;
ZeroMemory(&desc, sizeof(D3D11_TEXTURE2D_DESC));
desc.ArraySize = 1;
desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
desc.Format = DXGI_FORMAT_YUY2;
desc.MipLevels = 1;
desc.MiscFlags = 0;
desc.SampleDesc.Count = 1;
desc.SampleDesc.Quality = 0;
desc.Usage = D3D11_USAGE_DYNAMIC;
desc.Width = 1280;
desc.Height = 720;
ID3D11Texture2D* pTexture2D = nullptr;
HRESULT hr = pDevice->CreateTexture2D(&desc, NULL, &pTexture2D);
D3D11_MAPPED_SUBRESOURCE mappedResource;
ZeroMemory(&mappedResource, sizeof(DXGI_MAPPED_RECT));
hr = pDeviceContext->Map(pTexture2D, 0, D3D11_MAP_WRITE_DISCARD, 0, &mappedResource);
printf("RowPitch = %d\n", mappedResource.RowPitch);
pDeviceContext->Unmap(pTexture2D, 0);
pTexture2D->Release();
pDeviceContext->Release();
pDevice->Release();
getchar();
}
What am I doing wrong here?
This is not necessarily wrong. RowPitch depends on the layout the hardware and driver assigned to the texture, so the pitch can vary between systems. You are supposed to read the pitch back once the resource is mapped, and use it when you read or write the data.
See this thread and message for a code snippet that uses the pitch:
The texture resource will have its own pitch (the number of bytes in a row), which is probably different from the pitch of your source data. This pitch is given to you as the "RowPitch" member of D3D11_MAPPED_SUBRESOURCE. So typically you do something like this:
BYTE* mappedData = reinterpret_cast<BYTE*>(mappedResource.pData);
for(UINT i = 0; i < height; ++i)
{
memcpy(mappedData, buffer, rowspan);   // rowspan = bytes per row of the tightly packed source image
mappedData += mappedResource.RowPitch; // advance the destination by the driver's pitch
buffer += rowspan;                     // advance the source by its own row size
}
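As a concrete sketch for this question (pSrc is a hypothetical pointer to a tightly packed 1280x720 YUY2 frame; YUY2 stores 4 bytes per 2 pixels, i.e. 2 bytes per pixel, so the source pitch is 1280 * 2 = 2560 even when the driver reports a RowPitch of 5120):
const UINT width = 1280, height = 720;
const UINT srcPitch = width * 2;                        // tightly packed YUY2 row
BYTE* dst = reinterpret_cast<BYTE*>(mappedResource.pData);
const BYTE* src = pSrc;                                 // pSrc: your CPU-side frame (hypothetical)
for (UINT row = 0; row < height; ++row)
{
    memcpy(dst, src, srcPitch);                         // copy only the meaningful bytes of the row
    dst += mappedResource.RowPitch;                     // step by whatever pitch the driver chose
    src += srcPitch;
}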

CheckMultisampleQualityLevels(...) says the card does not support MSAA (which is not true for e.g. my GeForce GTX 780)?

I use CheckMultisampleQualityLevels(...) to establish the MSAA support on my hardware. I do it in this order:
D3D11CreateDevice(...) gives me device
device->CheckMultisampleQualityLevels(...)
Pass results to DXGI_SWAP_CHAIN_DESC.SampleDesc
CreateSwapChain(...) with given DXGI_SWAP_CHAIN_DESC
The problem is, CheckMultisampleQualityLevels(...) always gives me 0 for pNumQualityLevels. And I'm sure that my graphics card supports some MSAA (I've tested the program on a GeForce GTX 780 and others with the same result).
Did I miss something? Should I call something else before CheckMultisampleQualityLevels(...)?
The code:
Create device:
UINT createDeviceFlags = 0;
#ifdef DEBUG_DIRECTX
createDeviceFlags |= D3D11_CREATE_DEVICE_DEBUG;
#endif
D3D_DRIVER_TYPE driverTypes[] = {
D3D_DRIVER_TYPE_HARDWARE,
D3D_DRIVER_TYPE_WARP,
D3D_DRIVER_TYPE_REFERENCE,
};
std::string driverTypesNames[] = {
"D3D_DRIVER_TYPE_HARDWARE",
"D3D_DRIVER_TYPE_WARP",
"D3D_DRIVER_TYPE_REFERENCE",
};
UINT numDriverTypes = ARRAYSIZE(driverTypes);
D3D_FEATURE_LEVEL featureLevels[] = {
D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1,
D3D_FEATURE_LEVEL_10_0,
};
std::string featureLevelsNames[] = {
"D3D_FEATURE_LEVEL_11_0",
"D3D_FEATURE_LEVEL_10_1",
"D3D_FEATURE_LEVEL_10_0",
};
UINT numFeatureLevels = ARRAYSIZE(featureLevels);
D3D_FEATURE_LEVEL g_featureLevel = D3D_FEATURE_LEVEL_11_0;
for(UINT driverTypeIndex = 0; driverTypeIndex < numDriverTypes; driverTypeIndex++){
driverType = driverTypes[driverTypeIndex];
result = D3D11CreateDevice(NULL, driverType, NULL, createDeviceFlags, featureLevels, numFeatureLevels, D3D11_SDK_VERSION, &device, &g_featureLevel, &context);
if(SUCCEEDED(result)){
LOG(logDEBUG1, "Driver type: " << driverTypesNames[driverTypeIndex] << ".", MOD_GRAPHIC);
break;
}
}
ERROR_HANDLE(SUCCEEDED(result), L"Could not create device (DirectX 11).", MOD_GRAPHIC);
Check multi-sample quality levels (based on vertexwahn.de article):
sampleCountOut = 1;
maxQualityLevelOut = 0;
for(UINT sampleCount = 1; sampleCount <= D3D11_MAX_MULTISAMPLE_SAMPLE_COUNT; sampleCount++){
UINT maxQualityLevel = 0;
HRESULT hr = device->CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM, sampleCount, &maxQualityLevel);
if(maxQualityLevel > 0){
maxQualityLevel--;
}
ERROR_HANDLE(hr == S_OK, L"CheckMultisampleQualityLevels failed.", MOD_GRAPHIC);
if(maxQualityLevel > 0){
LOG(logDEBUG1, "MSAA " << sampleCount << "X supported with " << maxQualityLevel << " quality levels.", MOD_GRAPHIC);
sampleCountOut = sampleCount;
maxQualityLevelOut = maxQualityLevel;
}
}
Swap chain:
DXGI_SWAP_CHAIN_DESC sd;
ZeroMemory(&sd, sizeof(sd));
sd.BufferCount = 1;
sd.BufferDesc.Width = width;
sd.BufferDesc.Height = height;
sd.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
sd.BufferDesc.RefreshRate.Numerator = 60;
sd.BufferDesc.RefreshRate.Denominator = 1;
sd.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
sd.OutputWindow = *hwnd;
sd.SampleDesc.Count = sampleCount;
sd.SampleDesc.Quality = maxQualityLevel;
sd.Windowed = false;
sd.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH; // allow full-screen switching
//based on http://stackoverflow.com/questions/27270504/directx-creating-the-swapchain
IDXGIDevice * dxgiDevice = 0;
HRESULT hr = device->QueryInterface(__uuidof(IDXGIDevice), (void **)& dxgiDevice);
ERROR_HANDLE(SUCCEEDED(hr), L"Query for IDXGIDevice failed.", MOD_GRAPHIC);
IDXGIAdapter * dxgiAdapter = 0;
hr = dxgiDevice->GetParent(__uuidof(IDXGIAdapter), (void **)& dxgiAdapter);
ERROR_HANDLE(SUCCEEDED(hr), L"Could not get IDXGIAdapter.", MOD_GRAPHIC);
IDXGIFactory * dxgiFactory = 0;
hr = dxgiAdapter->GetParent(__uuidof(IDXGIFactory), (void **)& dxgiFactory);
ERROR_HANDLE(SUCCEEDED(hr), L"Could not get IDXGIFactory.", MOD_GRAPHIC);
// This system only has DirectX 11.0 installed (let's assume it)
result = dxgiFactory->CreateSwapChain(device, &sd, &swapChain);
LOG(logDEBUG1, "This system only has DirectX 11.0 installed. CreateSwapChain(...) used.", MOD_GRAPHIC);
ERROR_HANDLE(result == S_OK, L"Could not create swap chain.", MOD_GRAPHIC);
My ERROR_HANDLE(...) macro never triggers (the first parameter is true in all cases). The log says I use D3D_DRIVER_TYPE_HARDWARE for driver type.
The DirectX debug layer says the following (which indicates some problem, but I don't think it's the reason CheckMultisampleQualityLevels(...) gives me wrong results):
DXGI WARNING: IDXGISwapChain::Present: Fullscreen presentation inefficiencies incurred due to application not using IDXGISwapChain::ResizeBuffers appropriately, specifying a DXGI_MODE_DESC not available in IDXGIOutput::GetDisplayModeList, or not using DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH.DXGI_SWAP_CHAIN_DESC::BufferDesc = { 1600, 900, { 60, 1 }, R8G8B8A8_UNORM, 0, 0 }; DXGI_SWAP_CHAIN_DESC::SampleDesc = { 8, 0 }; DXGI_SWAP_CHAIN_DESC::Flags = 0x2; [ MISCELLANEOUS WARNING #98: ]
Your code subtracts 1 from maxQualityLevel before checking whether it's greater than zero. A returned value of 1 means it is valid to create the target at quality level 0.
Assuming you want this to work across vendors, you only really need to check that the returned count is > 0 and then create the surface at Quality = 0.
Quality levels > 0 are vendor specific and can mean any number of things to different GPUs. Nvidia's CSAA and AMD's EQAA are both available through non-zero quality levels, but you'd need to look at their own documentation to figure out what each quality level actually means. They're also functionally slightly different from traditional MSAA. "Quality" is a little misleading in the sense that a greater number doesn't necessarily mean greater quality; it would be more appropriate to call it "Mode".
See both:
http://www.nvidia.com/object/coverage-sampled-aa.html
and
http://developer.amd.com/wordpress/media/2012/10/EQAA%2520Modes%2520for%2520AMD%2520HD%25206900%2520Series%2520Cards.pdf
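Putting that together, a sketch of the corrected check (variable names follow the question's code; the value returned through the last parameter is a count of quality levels, so any value > 0 means the sample count is supported, and Quality = 0 is always a safe choice):
sampleCountOut = 1;
maxQualityLevelOut = 0;
for (UINT sampleCount = 1; sampleCount <= D3D11_MAX_MULTISAMPLE_SAMPLE_COUNT; sampleCount++)
{
    UINT numQualityLevels = 0;
    HRESULT hr = device->CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM, sampleCount, &numQualityLevels);
    if (SUCCEEDED(hr) && numQualityLevels > 0)          // this sample count is supported
    {
        sampleCountOut = sampleCount;                   // keep the highest supported count
        maxQualityLevelOut = 0;                         // and create the target at Quality = 0
    }
}
The swap chain description would then use sd.SampleDesc.Count = sampleCountOut and sd.SampleDesc.Quality = 0.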

DirectX 11 IDXGISwapChain::GetBuffer failing with DXGI_ERROR_INVALID_CALL

I am creating a device and swap chain in DirectX11, then trying to get the texture of the back-buffer. The creation step appears to work but the GetBuffer call always fails with error DXGI_ERROR_INVALID_CALL (887a0001), regardless of what I do.
Here is the code for creating the device:
D3D_FEATURE_LEVEL featureLevels[] = { D3D_FEATURE_LEVEL_11_0, D3D_FEATURE_LEVEL_10_1, D3D_FEATURE_LEVEL_10_0, D3D_FEATURE_LEVEL_9_1 };
int numFeatureLevels = sizeof(featureLevels) / sizeof(featureLevels[0]);
DXGI_SWAP_CHAIN_DESC swapChainDesc;
ID3D11Device* pDevice;
IDXGISwapChain* pSwapChain;
D3D_FEATURE_LEVEL featureLevel;
ID3D11DeviceContext* pContext;
swapChainDesc.Windowed = TRUE;
swapChainDesc.OutputWindow = (HWND)(void*)pWindowHandle;
swapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_SEQUENTIAL;
swapChainDesc.SampleDesc.Count = 1;
swapChainDesc.SampleDesc.Quality = 0;
swapChainDesc.BufferCount = 2;
swapChainDesc.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
swapChainDesc.BufferDesc.Width = 0;
swapChainDesc.BufferDesc.Height = 0;
swapChainDesc.BufferDesc.RefreshRate.Numerator = 1;
swapChainDesc.BufferDesc.RefreshRate.Denominator = 60;
swapChainDesc.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;
swapChainDesc.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
swapChainDesc.BufferUsage = DXGI_USAGE_SHADER_INPUT|DXGI_USAGE_RENDER_TARGET_OUTPUT;
swapChainDesc.Flags = 0;
err = D3D11CreateDeviceAndSwapChain(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, D3D11_CREATE_DEVICE_DEBUG, featureLevels, numFeatureLevels, D3D11_SDK_VERSION,
&swapChainDesc, &pSwapChain, &pDevice, &featureLevel, &pContext);
if (!SUCCEEDED(err))
{
printf("D3D11CreateDeviceAndSwapChain failed with error %08x\n", err);
return false;
}
m_pDevice = pDevice;
m_pSwapChain = pSwapChain;
m_pDeviceContext = pContext;
m_featureLevel = featureLevel;
ID3D11Texture2D* pTex = NULL;
err = m_pSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (void**)pTex);
if (!SUCCEEDED(err))
{
printf("GetBuffer failed with error %08x\n", err);
return false;
}
This is Managed C++ which is compiled into a DLL and run from a C# control's OnCreateControl method, which passes Handle into the function as pWindowHandle.
The create device call succeeds, giving me FEATURE_LEVEL_11_0, but the second printf always prints error 887a0001. Using the reference device does not help. I'm linking against d3dx11.lib, d3d11.lib, dxgi.lib, dxguid.lib, d3dcompiler.lib, d3d10.lib and d3dx10.lib.
I tried replacing the __uuidof with IID_ID3D11Texture2D and that made no difference.
I am using Visual Studio Express 2013 for Windows Desktop, on Windows 7, and the Microsoft DirectX SDK (June 2010). All x86 and x64, Debug and Release builds suffer from the same problem. My attempts to enable verbose debug output also fail; I have tried to force it on via the DirectX Properties in Control Panel, adding my program to the list of executables, but nothing extra is printed at runtime.
You've just passed the final argument incorrectly. Instead of this:
err = m_pSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (void**)pTex);
You should be passing a pointer to the interface pointer, like this:
err = m_pSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (void**)&pTex);
^
|
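The reason the original call fails is that pTex is NULL, so (void**)pTex passes a null output pointer to GetBuffer, which matches the DXGI_ERROR_INVALID_CALL you see. A short sketch of the usual pattern (pBackBuffer and pRTV are hypothetical names; m_pSwapChain and m_pDevice are from the question):
ID3D11Texture2D* pBackBuffer = nullptr;
err = m_pSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (void**)&pBackBuffer);
if (SUCCEEDED(err))
{
    // The back buffer is typically used to create a render target view.
    ID3D11RenderTargetView* pRTV = nullptr;
    err = m_pDevice->CreateRenderTargetView(pBackBuffer, NULL, &pRTV);
    pBackBuffer->Release();   // the view holds its own reference
}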

failure to create a DirectX device and swapchain

I am having issues retrieving a swap chain and device from DirectX. Further info is in the code.
void GXDX::StartUp(HWND* mainWindow,int w, int h)
{
//width and height are members of GXDX
width = w; //contains the width
height = h; //contains the height
this->mainWindow = mainWindow; // A handle to the main window. In the debugger it usually shows up
//as something like: unused = -735313406
ID3D10Texture2D *backBufferSurface;
DXGI_SWAP_CHAIN_DESC swapChainDesc;
swapChainDesc.BufferCount = 2;
swapChainDesc.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
swapChainDesc.BufferDesc.RefreshRate.Numerator = 60;
swapChainDesc.BufferDesc.RefreshRate.Denominator = 1;
swapChainDesc.BufferDesc.Width = width;
swapChainDesc.BufferDesc.Height = height;
swapChainDesc.SampleDesc.Count = 1;
swapChainDesc.SampleDesc.Quality = 0;
swapChainDesc.OutputWindow = *mainWindow;
swapChainDesc.Windowed = TRUE;
D3D10_DRIVER_TYPE driverType = D3D10_DRIVER_TYPE_HARDWARE;
HRESULT hr = D3D10CreateDeviceAndSwapChain(NULL,driverType,NULL,0,
D3D10_SDK_VERSION, &swapChainDesc,&swapChain,&dxDevice);
if(FAILED(hr))//Error is here
throw GXVideoException(L"Problems retrieving directX device");
}
Below are all the values given after the method has finished:
- &dxDevice 0x00e74b04 ID3D10Device * *
- 0x00000000 ID3D10Device *
- IUnknown {...} IUnknown
__vfptr CXX0030: Error: expression cannot be evaluated
- &swapChain 0x00e74b08 IDXGISwapChain * *
- 0x00000000 IDXGISwapChain *
- IDXGIDeviceSubObject {...} IDXGIDeviceSubObject
- IDXGIObject {...} IDXGIObject
- IUnknown {...} IUnknown
__vfptr CXX0030: Error: expression cannot be evaluated
- &swapChainDesc 0x002df90c {BufferDesc={...} SampleDesc={...} BufferUsage=0xcccccccc ...} DXGI_SWAP_CHAIN_DESC *
- BufferDesc {Width=0x00000320 Height=0x00000258 RefreshRate={...} ...} DXGI_MODE_DESC
Width 800 unsigned int
Height 600 unsigned int
- RefreshRate {Numerator=60 Denominator=1 } DXGI_RATIONAL
Numerator 60 unsigned int
Denominator 1 unsigned int
Format DXGI_FORMAT_R8G8B8A8_UNORM DXGI_FORMAT
ScanlineOrdering -858993460 DXGI_MODE_SCANLINE_ORDER
Scaling -858993460 DXGI_MODE_SCALING
- SampleDesc {Count=0x00000001 Quality=0x00000000 } DXGI_SAMPLE_DESC
Count 1 unsigned int
Quality 0 unsigned int
BufferUsage 3435973836 unsigned int
BufferCount 2 unsigned int
- OutputWindow 0x008b08ca {unused=-665779669 } HWND__ *
unused -665779669 int
Windowed 1 int
SwapEffect -858993460 DXGI_SWAP_EFFECT
Flags 3435973836 unsigned int
driverType D3D10_DRIVER_TYPE_HARDWARE D3D10_DRIVER_TYPE
hr 0x887a0001 HRESULT
- this 0x00e74af0 {dxDevice=0x00000000 swapChain=0x00000000 } GXDX * const
- GXRenderer {running=true width=0x00000320 height=0x00000258 ...} GXRenderer
- __vfptr 0x013277dc const GXDX::`vftable' *
[0] 0x0132110e GXDX::Render(void) *
[0x1] 0x013211d6 GXDX::StartUp(struct HWND__ * *,int,int) *
[0x2] 0x01321041 GXDX::SetupScene(void) *
[0x3] 0x01321069 GXDX::DisplayScene(void) *
running true bool
width 0x00000320 int
height 0x00000258 int
- mainWindow 0x0132a214 struct HWND__ * GXRenderManager::mainWindow {unused=0x008b08ca } HWND__ *
unused 0x008b08ca int
- dxDevice 0x00000000 ID3D10Device *
+ IUnknown {...} IUnknown
- swapChain 0x00000000 IDXGISwapChain *
- IDXGIDeviceSubObject {...} IDXGIDeviceSubObject
- IDXGIObject {...} IDXGIObject
- IUnknown {...} IUnknown
__vfptr CXX0030: Error: expression cannot be evaluated
[EDIT]
Prior to Goz's response, I checked the debug output in more detail and this is what was received:
DXGI Error: IDXGIFactory::CreateSwapChain: SwapEffect is unknown.
I am guessing this is because I did not set the SwapEffect member. I will do that and check it out.
Silly me. Earlier I mentioned in a comment that neither of the books I was reading set a swap-effect property. I knew something was off whether I included it or not.
But I also noticed that both books zeroed out the swap chain description, which makes it safer to leave properties out. So I added the following:
SecureZeroMemory(&swapChainDesc, sizeof(swapChainDesc));
and everything worked. That aside, I should still set a swap-effect property; why the books decided not to, I haven't figured out yet.
The following code worked for me (I'm using Microsoft Visual Studio Express 2012 for Windows Desktop)
DXGI_SWAP_CHAIN_DESC swapChainDesc;
swapChainDesc.BufferDesc.Width = width;
swapChainDesc.BufferDesc.Height = height;
swapChainDesc.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
swapChainDesc.BufferDesc.RefreshRate.Numerator = 60;
swapChainDesc.BufferDesc.RefreshRate.Denominator = 1;
swapChainDesc.SampleDesc.Count = 1;
swapChainDesc.SampleDesc.Quality = 0;
swapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
swapChainDesc.BufferCount = 1;
swapChainDesc.OutputWindow = hWnd;
swapChainDesc.Windowed = true;
swapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
swapChainDesc.Flags = 0;
ID3D11Device *pDevice = 0;
ID3D11DeviceContext *pContext = 0;
IDXGISwapChain *pSwapChain = 0;
HRESULT result;
result = D3D11CreateDeviceAndSwapChain(NULL, driverType, NULL, D3D11_CREATE_DEVICE_SINGLETHREADED, featureLevel, totalFeatureLevel, D3D11_SDK_VERSION, &swapChainDesc, &pSwapChain, &pDevice, &currentFeatureLevel, &pContext);
So you can just try adding more attributes to the swapChainDesc.
Try not passing a pointer to your HWND through. You should pass an HWND as a non-pointer. Other than that I don't see anything massively wrong.
This is what DXErrorLookup says about your error:
HRESULT: 0x887a0001 (2289696769)
Name: DXGI_ERROR_INVALID_CALL
Description: The application has made an erroneous API call that it had enough information to avoid. This error is intended to denote that the application should be altered to avoid the error. Use of the debug version of the DXGI.DLL will provide run-time debug output with further information.
Severity code: Failed
Facility Code: FACILITY_DXGI (2170)
Error Code: 0x0001 (1)
So have you considered using the debug version of DXGI to see what the error is?
By the way, my working DX10 initialisation is as follows (warning: a LOT of code!):
HRESULT hr = S_OK;
// Wrong init params passed in.
if ( pParams->paramSize != sizeof( D3D10InitParams ) )
return false;
// Upgrade the initparams to the correct version
mInitParams = *(D3D10InitParams*)pParams;
// Create factory.
IDXGIFactory* pFactory = NULL;
if ( FAILED( CreateDXGIFactory( __uuidof( IDXGIFactory ), (void**)&pFactory ) ) )
{
return false;
}
if ( FAILED( pFactory->MakeWindowAssociation( mInitParams.hWnd, 0 ) ) )
{
return false;
}
HWND hTemp;
pFactory->GetWindowAssociation( &hTemp );
// Enumerate adapters.
unsigned int count = 0;
IDXGIAdapter * pAdapter;
std::vector<IDXGIAdapter*> vAdapters;
while( pFactory->EnumAdapters( count, &pAdapter ) != DXGI_ERROR_NOT_FOUND )
{
vAdapters.push_back( pAdapter );
count++;
}
unsigned int selectedAdapter = mInitParams.display;
if ( vAdapters.size() > 1 )
{
// Need to handle multiple available adapters.
}
// Release all other adapters.
count = 0;
unsigned int max = (unsigned int)vAdapters.size();
while( count < max )
{
if ( count != selectedAdapter )
{
vAdapters[count]->Release();
}
count++;
}
// Device should support all basic DX10 features.
// Caps does not support enough basic features.
//if ( !CheckCaps( &caps ) )
// return false;
// Create the D3D 10 device.
DXGI_MODE_DESC dxgiModeDesc;
dxgiModeDesc.Width = mInitParams.width;
dxgiModeDesc.Height = mInitParams.height;
dxgiModeDesc.RefreshRate.Numerator = (mInitParams.refreshRate == 0) ? 60 : mInitParams.refreshRate;
dxgiModeDesc.RefreshRate.Denominator = 1;
dxgiModeDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
dxgiModeDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_PROGRESSIVE;
dxgiModeDesc.Scaling = DXGI_MODE_SCALING_CENTERED;
DXGI_SAMPLE_DESC dxgiSampleDesc;
dxgiSampleDesc.Count = 1;
dxgiSampleDesc.Quality = 0;
//DXGI_USAGE dxgiUsage;
//dxgiUsage.
DXGI_SWAP_CHAIN_DESC dxgiSwapChainDesc;
dxgiSwapChainDesc.BufferDesc = dxgiModeDesc;
dxgiSwapChainDesc.SampleDesc = dxgiSampleDesc;
dxgiSwapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
dxgiSwapChainDesc.BufferCount = 2;
dxgiSwapChainDesc.OutputWindow = mInitParams.hWnd;
dxgiSwapChainDesc.Windowed = mInitParams.windowed;
dxgiSwapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
dxgiSwapChainDesc.Flags = 0;//DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH;
// Set the device as a debug device when compiled for debug.
#ifdef _DEBUG
unsigned int flags = D3D10_CREATE_DEVICE_DEBUG;
#else
unsigned int flags = 0;
#endif
mpAdapter = vAdapters[selectedAdapter];
// Create the device and swap chain.
if ( FAILED( D3D10CreateDeviceAndSwapChain( mpAdapter, D3D10_DRIVER_TYPE_HARDWARE, NULL, flags, D3D10_SDK_VERSION, &dxgiSwapChainDesc, &mpSwapChain, &mpDevice ) ) )
{
return false;
}
// Get the back buffer.
ID3D10Texture2D* pBuffer = NULL;
if ( FAILED( mpSwapChain->GetBuffer( 0, __uuidof( ID3D10Texture2D ), (void**)&pBuffer ) ) )
{
return false;
}
// Create the default render target view.
hr = mpDevice->CreateRenderTargetView( pBuffer, NULL, &mDefaultRenderTarget );
pBuffer->Release();
if ( FAILED( hr ) )
{
return false;
}
// Create depth stencil texture
D3D10_TEXTURE2D_DESC descDepth;
descDepth.Width = mInitParams.width;
descDepth.Height = mInitParams.height;
descDepth.MipLevels = 1;
descDepth.ArraySize = 1;
descDepth.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
descDepth.SampleDesc.Count = 1;
descDepth.SampleDesc.Quality = 0;
descDepth.Usage = D3D10_USAGE_DEFAULT;
descDepth.BindFlags = D3D10_BIND_DEPTH_STENCIL;
descDepth.CPUAccessFlags = 0;
descDepth.MiscFlags = 0;
if ( FAILED( mpDevice->CreateTexture2D( &descDepth, NULL, &mpDepthStencilTex ) ) )
{
return false;
}
// Create the depth stencil view
D3D10_DEPTH_STENCIL_VIEW_DESC descDSV;
descDSV.Format = descDepth.Format;
descDSV.ViewDimension = D3D10_DSV_DIMENSION_TEXTURE2D;
descDSV.Texture2D.MipSlice = 0;
if ( FAILED( mpDevice->CreateDepthStencilView( mpDepthStencilTex, &descDSV, &mDefaultDepthStencilTarget ) ) )
{
return false;
}
// Set the default render targets.
mpDevice->OMSetRenderTargets( 1, &mDefaultRenderTarget, mDefaultDepthStencilTarget );
mpEffectDevice = new D3D10EffectStateDevice( GetDevice() );
// Set the default render states.
SetupRenderStates();
// Set the default viewport.
D3D10_VIEWPORT d3d10ViewPort;
d3d10ViewPort.Width = mInitParams.width;
d3d10ViewPort.Height = mInitParams.height;
d3d10ViewPort.TopLeftX = 0;
d3d10ViewPort.TopLeftY = 0;
d3d10ViewPort.MinDepth = 0.0f;
d3d10ViewPort.MaxDepth = 1.0f;
GetDevice()->RSSetViewports( 1, &d3d10ViewPort );
I hope that's some help!