DirectCompute CreateBuffer fails with error 0x80070057 (E_INVALIDARG) - c++

I'm trying to create a buffer in GPU memory to upload data from CPU. GPU access will be readonly. Data will be used as an input buffer for a compute shader.
CreateBuffer() fails with error 0x80070057 (E_INVALIDARG). I read the docs and read it again without discovering which argument cause the failure.
Here is an extract from my code where I marked the failure:
HRESULT hr = S_OK;
RECT rc;
GetClientRect( g_hWnd, &rc );
UINT width = rc.right - rc.left;
UINT height = rc.bottom - rc.top;
UINT createDeviceFlags = 0;
#ifdef _DEBUG
createDeviceFlags |= D3D11_CREATE_DEVICE_DEBUG;
#endif
D3D_DRIVER_TYPE driverTypes[] =
{
#ifdef WARP
D3D_DRIVER_TYPE_REFERENCE,
#else
D3D_DRIVER_TYPE_HARDWARE,
#endif
};
UINT numDriverTypes = sizeof( driverTypes ) / sizeof( driverTypes[0] );
DXGI_SWAP_CHAIN_DESC sd;
ZeroMemory( &sd, sizeof( sd ) );
sd.BufferCount = 1;
sd.BufferDesc.Width = width;
sd.BufferDesc.Height = height;
sd.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
sd.BufferDesc.RefreshRate.Numerator = 60;
sd.BufferDesc.RefreshRate.Denominator = 1;
sd.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT | DXGI_USAGE_UNORDERED_ACCESS | DXGI_USAGE_SHADER_INPUT;
sd.OutputWindow = g_hWnd;
sd.SampleDesc.Count = 1;
sd.SampleDesc.Quality = 0;
sd.Windowed = TRUE;
D3D_FEATURE_LEVEL FeatureLevels = D3D_FEATURE_LEVEL_11_0;
for( UINT driverTypeIndex = 0; driverTypeIndex < numDriverTypes; driverTypeIndex++ )
{
g_driverType = driverTypes[driverTypeIndex];
hr = D3D11CreateDeviceAndSwapChain( NULL, g_driverType, NULL, createDeviceFlags, &FeatureLevels,1,
D3D11_SDK_VERSION, &sd, &g_pSwapChain, &g_pd3dDevice, NULL, &g_pImmediateContext );
if( SUCCEEDED( hr ) )
break;
}
if( FAILED( hr ) )
return hr;
// check if GPU supports doubles
D3D11_FEATURE_DATA_DOUBLES fdDoubleSupport;
g_pd3dDevice->CheckFeatureSupport( D3D11_FEATURE_DOUBLES, &fdDoubleSupport, sizeof(fdDoubleSupport) );
GPUcanDoDoubles = fdDoubleSupport.DoublePrecisionFloatShaderOps;
D3D11_BUFFER_DESC desc;
BYTE Data[200];
D3D11_SUBRESOURCE_DATA InitData;
desc.BindFlags = D3D11_BIND_UNORDERED_ACCESS | D3D11_BIND_SHADER_RESOURCE;
desc.Usage = D3D11_USAGE_DYNAMIC;
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
desc.ByteWidth = 200;
desc.MiscFlags = D3D11_RESOURCE_MISC_BUFFER_STRUCTURED;
desc.StructureByteStride = 2;
InitData.pSysMem = Data;
hr = g_pd3dDevice->CreateBuffer(&desc, &InitData, &g_pcbFractal); // <== E_INVALIARG here
// Create constant buffer
D3D11_BUFFER_DESC Desc;
Desc.Usage = D3D11_USAGE_DYNAMIC;
Desc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
Desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
Desc.MiscFlags = 0;
Desc.ByteWidth = ((( (GPUcanDoDoubles) ? sizeof(MandelConstantsDoubles) : sizeof(MandelConstantsNoDoubles) ) + 15)/16)*16; // must be multiple of 16 bytes
hr = g_pd3dDevice->CreateBuffer( &Desc, NULL, &g_pcbFractal); // This one succeed
Any help appreciated.

Enabling Direct3D 11 Device Debugging and looking at the debug output window answers your question:
D3D11 ERROR: ID3D11Device::CreateBuffer: When creating a buffer with the MiscFlag D3D11_RESOURCE_MISC_BUFFER_STRUCTURED specified, the StructureByteStride must be greater than zero, no greater than 2048, and a multiple of 4. [ STATE_CREATION ERROR #2097339: CREATEBUFFER_INVALIDSTRUCTURESTRIDE]
If you fix that, you get:
D3D11 ERROR: ID3D11Device::CreateBuffer: A D3D11_USAGE_DYNAMIC Resource cannot be bound to certain parts of the graphics pipeline, but must have at least one BindFlags bit set. The BindFlags bits (0x88) have the following settings: D3D11_BIND_STREAM_OUTPUT (0), D3D11_BIND_RENDER_TARGET (0), D3D11_BIND_DEPTH_STENCIL (0), D3D11_BIND_UNORDERED_ACCESS (1). [ STATE_CREATION ERROR #64: CREATEBUFFER_INVALIDBINDFLAGS]
Which is basically telling you that you can't combine D3D11_USAGE_DYNAMIC with D3D11_BIND_UNORDERED_ACCESS

Related

I can't assign a texture to IMGUI because I can't create directx texture

I'm trying to add some images for using with IMGUI library. Basically, IMGUI can get directx or OpenGL raw texture data to draw. You can see example code from IMGUI for creating a texture ;
// Simple helper function to load an image into a DX11 texture with common settings
bool LoadTextureFromFile(const char* filename, ID3D11ShaderResourceView** out_srv, int* out_width, int* out_height)
{
// Load from disk into a raw RGBA buffer
int image_width = 0;
int image_height = 0;
unsigned char* image_data = stbi_load(filename, &image_width, &image_height, NULL, 4);
if (image_data == NULL)
return false;
// Create texture
D3D11_TEXTURE2D_DESC desc;
ZeroMemory(&desc, sizeof(desc));
desc.Width = image_width;
desc.Height = image_height;
desc.MipLevels = 1;
desc.ArraySize = 1;
desc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
desc.SampleDesc.Count = 1;
desc.Usage = D3D11_USAGE_DEFAULT;
desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
desc.CPUAccessFlags = 0;
ID3D11Texture2D *pTexture = NULL;
D3D11_SUBRESOURCE_DATA subResource;
subResource.pSysMem = image_data;
subResource.SysMemPitch = desc.Width * 4;
subResource.SysMemSlicePitch = 0;
g_pd3dDevice->CreateTexture2D(&desc, &subResource, &pTexture);
// Create texture view
D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc;
ZeroMemory(&srvDesc, sizeof(srvDesc));
srvDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
srvDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
srvDesc.Texture2D.MipLevels = desc.MipLevels;
srvDesc.Texture2D.MostDetailedMip = 0;
g_pd3dDevice->CreateShaderResourceView(pTexture, &srvDesc, out_srv);
pTexture->Release();
*out_width = image_width;
*out_height = image_height;
stbi_image_free(image_data);
return true;
}
Everything is ok except 'g_pd3dDevice->CreateTexture2D(&desc, &subResource, &pTexture);' line. Because as you can see g_pd3dDevice is undefined. That's why I put a line for this like : ID3D11Device* g_pd3dDevice = nullptr; but when I run code it hangs.. I guess I must create a 3ddevice before using this part. But I can't. Any suggestions? Thanks
Now I can get some image but it's wrong:
Now my code like this ;
bool LoadTextureFromBufferDX11(unsigned char* address, ID3D11ShaderResourceView** out_srv, int* out_width, int* out_height)
{
WNDCLASSEX wc = { sizeof(WNDCLASSEX), CS_CLASSDC, WndProc, 0L, 0L, GetModuleHandle(NULL), NULL, NULL, NULL, NULL, _T("ImGui Example"), NULL };
::RegisterClassEx(&wc);
//HWND hwnd = ::CreateWindow(wc.lpszClassName, _T("Dear ImGui DirectX11 Example"), WS_OVERLAPPEDWINDOW, 100, 100, 200, 200, NULL, NULL, wc.hInstance, NULL);
HWND hwnd = GetForegroundWindow();
CreateDeviceD3D(hwnd);
/*
if (!CreateDeviceD3D(hwnd))
{
CleanupDeviceD3D();
::UnregisterClass(wc.lpszClassName, wc.hInstance);
return 1;
}
*/
//ImGui_ImplDX11_Init(g_pd3dDevice, g_pd3dDeviceContext);
// Load from disk into a raw RGBA buffer
int image_width = 0;
int image_height = 0;
const char* filename = "e:\\obama.png";
unsigned char* image_data = stbi_load(filename, &image_width, &image_height, NULL, 4);
//unsigned char* image_data = address;
if (image_data == NULL)
return false;
// Create texture
D3D11_TEXTURE2D_DESC desc;
ZeroMemory(&desc, sizeof(desc));
desc.Width = image_width;
desc.Height = image_height;
desc.MipLevels = 1;
desc.ArraySize = 1;
desc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
desc.SampleDesc.Count = 1;
desc.Usage = D3D11_USAGE_DEFAULT;
desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
desc.CPUAccessFlags = 0;
ID3D11Texture2D *pTexture = NULL;
D3D11_SUBRESOURCE_DATA subResource;
subResource.pSysMem = image_data;
subResource.SysMemPitch = desc.Width * 4;
subResource.SysMemSlicePitch = 0;
g_pd3dDevice->CreateTexture2D(&desc, &subResource, &pTexture);
// Create texture view
D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc;
ZeroMemory(&srvDesc, sizeof(srvDesc));
srvDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
srvDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
srvDesc.Texture2D.MipLevels = desc.MipLevels;
srvDesc.Texture2D.MostDetailedMip = 0;
g_pd3dDevice->CreateShaderResourceView(pTexture, &srvDesc, out_srv);
pTexture->Release();
*out_width = image_width;
*out_height = image_height;
return true;
}
And I trigger this function from host app.
static ID3D11ShaderResourceView* my_texture = NULL;
int my_image_width = 0;
int my_image_height = 0;
fn_export double imgui_showimage(unsigned char* address) {
bool ret = LoadTextureFromBufferDX11(address, &my_texture, &my_image_width, &my_image_height);
//IM_ASSERT(ret);
//ImGui::Image((void*)my_texture, ImVec2(my_image_width, my_image_height));
return 1;
}
And I draw imgu image from host like ;
fn_export double imgui_image(char* address) {
ImGui::Image((void*)my_texture, ImVec2(my_image_width, my_image_height));
return 1;
}

copy color and depthstencil buffer for later use

I'm new to directX and my task is to copy the current depthstencil and color buffer into a texture. Later this textures will be back copied into the color/depthstencil buffer to render on the old scene without rendering the hole scene twice.
This code generates the rendertarget:
bool CGraphicsDriverDX11::CreateRenderTargetTexture(UINT nWidth, UINT nHeight, DXGI_FORMAT Format,
ID3D11Texture2D** ppRenderTargetTexture, ID3D11RenderTargetView** ppRenderTargetView,
ID3D11ShaderResourceView** ppRenderTargetSRV, bool bMultiSample)
{
D3D11_TEXTURE2D_DESC TextureDesc;
ZeroMemory(&TextureDesc, sizeof(TextureDesc));
TextureDesc.Width = nWidth;
TextureDesc.Height = nHeight;
TextureDesc.MipLevels = 1;
TextureDesc.ArraySize = 1;
TextureDesc.Format = Format;
if (bMultiSample)
{
TextureDesc.SampleDesc.Count = m_nMultiSampleCount;
TextureDesc.SampleDesc.Quality = m_nMultiSampleQuality;
}
else
{
TextureDesc.SampleDesc.Count = 1;
TextureDesc.SampleDesc.Quality = 0;
}
TextureDesc.Usage = D3D11_USAGE_DEFAULT;
TextureDesc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
TextureDesc.CPUAccessFlags = 0;
TextureDesc.MiscFlags = 0;
HRESULT hr = m_pD3D11Device->CreateTexture2D(&TextureDesc, nullptr, ppRenderTargetTexture);
if (FAILED(hr))
{
DebugAssertOnce(UNABLE_TO_CREATE_RENDER_TARGET_TEXTURE);
return false;
}
hr = m_pD3D11Device->CreateRenderTargetView(*ppRenderTargetTexture, nullptr, ppRenderTargetView);
if (FAILED(hr))
{
DebugAssertOnce(UNABLE_TO_CREATE_RENDER_TARGET_VIEW);
return false;
}
if (ppRenderTargetSRV)
{
D3D11_SHADER_RESOURCE_VIEW_DESC SRVDesc;
ZeroMemory(&SRVDesc, sizeof(SRVDesc));
SRVDesc.Format = TextureDesc.Format;
SRVDesc.Texture2D.MipLevels = TextureDesc.MipLevels;
SRVDesc.Texture2D.MostDetailedMip = 0;
SRVDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
hr = m_pD3D11Device->CreateShaderResourceView(*ppRenderTargetTexture, &SRVDesc, ppRenderTargetSRV);
if (FAILED(hr))
{
DebugAssertOnce(UNABLE_TO_CREATE_SHADER_RESOURCE_VIEW);
return false;
}
}
return true;
}
This code generates the depthbuffer
bool CGraphicsDriverDX11::CreateDepthTexture(UINT nWidth, UINT nHeight, DXGI_FORMAT Format,
ID3D11Texture2D** ppDepthStencilTexture, ID3D11DepthStencilView** ppDepthStencilView,
ID3D11ShaderResourceView** ppDepthStencilSRV, bool bMultiSample)
{
D3D11_TEXTURE2D_DESC TextureDesc;
ZeroMemory(&TextureDesc, sizeof(TextureDesc));
TextureDesc.Width = nWidth;
TextureDesc.Height = nHeight;
TextureDesc.MipLevels = 1;
TextureDesc.ArraySize = 1;
TextureDesc.Format = Format;
if (bMultiSample)
{
TextureDesc.SampleDesc.Count = m_nMultiSampleCount;
TextureDesc.SampleDesc.Quality = m_nMultiSampleQuality;
}
else
{
TextureDesc.SampleDesc.Count = 1;
TextureDesc.SampleDesc.Quality = 0;
}
TextureDesc.Usage = D3D11_USAGE_DEFAULT;
TextureDesc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
TextureDesc.CPUAccessFlags = 0;
TextureDesc.MiscFlags = 0;
HRESULT hr = m_pD3D11Device->CreateTexture2D(&TextureDesc, nullptr, ppDepthStencilTexture);
if (FAILED(hr))
{
DebugAssertOnce(UNABLE_TO_CREATE_DEPTHBUFFER_TEXTURE);
return false;
}
m_pD3D11Device->CreateDepthStencilView(*ppDepthStencilTexture, nullptr, ppDepthStencilView);
if (FAILED(hr))
{
DebugAssertOnce(UNABLE_TO_CREATE_DEPTHBUFFER_VIEW);
return false;
}
if (ppDepthStencilSRV)
{
D3D11_SHADER_RESOURCE_VIEW_DESC SRVDesc;
ZeroMemory(&SRVDesc, sizeof(SRVDesc));
SRVDesc.Format = TextureDesc.Format;
SRVDesc.Texture2D.MipLevels = TextureDesc.MipLevels;
SRVDesc.Texture2D.MostDetailedMip = 0;
SRVDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
hr = m_pD3D11Device->CreateShaderResourceView(*ppDepthStencilTexture, &SRVDesc, ppDepthStencilSRV);
if (FAILED(hr))
{
DebugAssertOnce(UNABLE_TO_CREATE_SHADER_RESOURCE_VIEW);
return false;
}
}
return true;
}
now I try to make a copy of it:
ResolveSubresource(GetZBufferCopyTexture(), 0, GetDepthStencilBufferTexture(), 0, DXGI_FORMAT_D24_UNORM_S8_UINT);
ResolveSubresource(GetColorCopyTexture(), 0, GetBackBuffer(), 0, DXGI_FORMAT_R8G8B8A8_UNORM);
and also try to copy the copy back to the rendertarget/depthstencil
ResolveSubresource(GetDepthStencilBufferTexture(), 0, GetZBufferCopyTexture(), 0, DXGI_FORMAT_D24_UNORM_S8_UINT);
ResolveSubresource(GetBackBuffer(), 0, GetColorCopyTexture(), 0, DXGI_FORMAT_R8G8B8A8_UNORM);
but this does not work correctly. I see no changes. Maybe my understanding how directx11 works is completely wrong.
I did this with OpenGL, there I only had to copy the FramebufferObject with the blitframebuffer command and it worked very well. It was the same project, so I'm sure that I call these commands in the right order. But directx11 is completely new to me
EDIT:
I also changed the command "ResolveSubresource" to "CopyResource" but also no changes
I found the mistake:
I used the wrong textures...
Now it works very fine, BTW I use the "CopyResource" command, because the "ResolveSubresource" only copys a multisampled resource into a non-multisampled resource

DXGI_FORMAT_YUY2 textures return different RowPitch under Windows 8.1 and Windows 10

My build environment is as follows:
Windows 8.1, VS2012, desktop application built using windows 8.0 SDK and C++.
When I run my program on windows 8.1 the RowPitch prints 2560 but under windows 10 the same program prints 5120.
What am I doing wrong here?
Here is the code, Thanks for all the replies.
#include <d3d11.h>
static bool init_directx11(ID3D11Device **pDevice, ID3D11DeviceContext **pDeviceContext)
{
D3D_FEATURE_LEVEL featureLevels[] = {D3D_FEATURE_LEVEL_11_0, D3D_FEATURE_LEVEL_10_1, D3D_FEATURE_LEVEL_10_0, D3D_FEATURE_LEVEL_9_1};
D3D_FEATURE_LEVEL featureLevel;
UINT createDeviceFlags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
HRESULT hr = D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, createDeviceFlags, featureLevels, ARRAYSIZE(featureLevels), D3D11_SDK_VERSION, pDevice,
&featureLevel, pDeviceContext);
return SUCCEEDED(hr);
}
int _tmain(int argc, _TCHAR* argv[])
{
ID3D11Device *pDevice = nullptr;
ID3D11DeviceContext *pDeviceContext= nullptr;
if (!init_directx11(&pDevice, &pDeviceContext))
{
return FALSE;
}
D3D11_TEXTURE2D_DESC desc;
ZeroMemory(&desc, sizeof(D3D11_TEXTURE2D_DESC));
desc.ArraySize = 1;
desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
desc.Format = DXGI_FORMAT_YUY2;
desc.MipLevels = 1;
desc.MiscFlags = 0;
desc.SampleDesc.Count = 1;
desc.SampleDesc.Quality = 0;
desc.Usage = D3D11_USAGE_DYNAMIC;
desc.Width = 1280;
desc.Height = 720;
ID3D11Texture2D* pTexture2D = nullptr;
HRESULT hr = pDevice->CreateTexture2D(&desc, NULL, &pTexture2D);
D3D11_MAPPED_SUBRESOURCE mappedResource;
ZeroMemory(&mappedResource, sizeof(DXGI_MAPPED_RECT));
hr = pDeviceContext->Map(pTexture2D, 0, D3D11_MAP_WRITE_DISCARD, 0, &mappedResource);
printf("RowPitch = %d\n", mappedResource.RowPitch);
pDeviceContext->Unmap(pTexture2D, 0);
pTexture2D->Release();
pDeviceContext->Release();
pDevice->Release();
getchar();
}
What am I doing wrong here?
This is not necessarily wrong. RowPitch depends on layout the hardware and driver assigned for the texture. The pitch might vary. You are supposed to read the pitch back once the resource is mapped, and use it respectively to read or write the data.
See this thread and message for pitch use code snippet:
The texture resource will have it's own pitch (the number of bytes in a row), which is probably different than the pitch of your source data. This pitch is given to you as the "RowPitch" member of D3D11_MAPPED_SUBRESOURCE. So typically you do something like this:
BYTE* mappedData = reinterpret_cast<BYTE*>(mappedResource.pData);
for(UINT i = 0; i < height; ++i)
{
memcpy(mappedData, buffer, rowspan);
mappedData += mappedResource.RowPitch;
buffer += rowspan;
}

DirectX 11 CreateSwapChain() fails with error DXGI_ERROR_INVALID_CALL

Everytime I try to create the swapChain it throws this error.
After hours searching for a fix for this I found nothing that worked for me.
Here's the important part of the code:
bool Direct3D::Initialize(HWND hWnd)
{
HRESULT hResult;
ID3D11Device* pDevice = NULL;
ID3D11DeviceContext* pDeviceContext = NULL;
IDXGIDevice* pDXGIDevice = NULL;
IDXGIAdapter* pAdapter = NULL;
IDXGIFactory* pFactory = NULL;
IDXGISwapChain* pSwapChain = NULL;
D3D_FEATURE_LEVEL featureLevels[] = { //Add feature levels to support here
D3D_FEATURE_LEVEL_11_0
};
#ifdef _DEBUG
UINT deviceFlags = D3D11_CREATE_DEVICE_BGRA_SUPPORT | D3D11_CREATE_DEVICE_DEBUG;
#else
UINT deviceFlags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
#endif
//Create the device and deviceContext
hResult = D3D11CreateDevice(NULL, //needs to be NULL if D3D_DRIVER_TYPE_HARDWARE is used; NULL takes the default adapter
D3D_DRIVER_TYPE_HARDWARE,
NULL, //needs to be not NULL if D3D_DRIVER_TYPE_SOFTWARE is used
deviceFlags,
featureLevels,
ARRAYSIZE(featureLevels),
D3D11_SDK_VERSION,
&pDevice,
NULL,
&pDeviceContext);
if (FAILED(hResult))
{
return false;
}
hResult = pDevice->QueryInterface(__uuidof(IDXGIDevice), (void**)&pDXGIDevice);
if (FAILED(hResult))
{
return false;
}
hResult = pDXGIDevice->GetAdapter(&pAdapter);
if (FAILED(hResult))
{
return false;
}
hResult = pAdapter->GetParent(__uuidof(IDXGIFactory), (void**)&pFactory);
if (FAILED(hResult))
{
return false;
}
DXGI_MODE_DESC bufferDesc;
ZeroMemory(&bufferDesc, sizeof(DXGI_MODE_DESC));
bufferDesc.Width = 0; //Zero for evaluating it from the output window
bufferDesc.Height = 0; //Zero for evaluating it from the output window
bufferDesc.RefreshRate.Numerator = config.refreshRate;
bufferDesc.RefreshRate.Denominator = 1;
bufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
bufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
bufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;
DXGI_SWAP_CHAIN_DESC swapChainDesc;
ZeroMemory(&swapChainDesc, sizeof(DXGI_SWAP_CHAIN_DESC));
swapChainDesc.BufferDesc = bufferDesc;
swapChainDesc.SampleDesc.Count = 1;
swapChainDesc.SampleDesc.Quality = 0;
swapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
swapChainDesc.BufferCount = 1;
swapChainDesc.OutputWindow = hWnd;
swapChainDesc.Windowed = config.fullscreen;
swapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL;
swapChainDesc.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH | DXGI_SWAP_CHAIN_FLAG_DISPLAY_ONLY;
hResult = pFactory->CreateSwapChain(pDevice, &swapChainDesc, &pSwapChain);
CGE_SAFE_RELEASE(pDXGIDevice);
CGE_SAFE_RELEASE(pAdapter);
CGE_SAFE_RELEASE(pFactory);
if (FAILED(hResult))
{
return false;
}
return true;
}
Looking at the documentation for CreateSwapChain() it seems to be that pSwapChain has to be not NULL, but that doesn't make sense in my opinion because I want to specify pSwapChain with CreateSwapChain().
Does anyone know a solution for this problem?
Your IDXGISwapChain should be associated with SwapChainPanel XAML control(or HWND if you are running Win32 application). You can do initialization like this:
hr = dxgiFactory2->CreateSwapChainForHwnd( g_pd3dDevice, g_hWnd, &sd, nullptr, nullptr, &g_pSwapChain1 );
if (SUCCEEDED(hr))
{
hr = g_pSwapChain1->QueryInterface( __uuidof(IDXGISwapChain), reinterpret_cast<void**>(&g_pSwapChain) );
}
This code is from microsoft Win32 DirectX sample.
https://code.msdn.microsoft.com/windowsdesktop/Direct3D-Tutorial-Win32-829979ef/view/Discussions#content
If you are running WinRT application you can look trough DirectX and XAML application template.
You are passing in the address of your Swap Chain Pointer. This is so the Create device and swap chain function can fill that pointer out with information. Here is a example.
//loop through our driver types till we find the one we will be using
for (unsigned int i = 0; i < DriverCount; i++)
{
//Create our device and swap chain
DXERROR = D3D11CreateDeviceAndSwapChain(nullptr, drivers[i], nullptr,
Flag, levels, LevelsCount, D3D11_SDK_VERSION, &SwapDesc, &DX.pSwapChain,
&DX.pDevice, &DX.FeatureLevel, &DX.pImmediateContext);
if (SUCCEEDED(DXERROR))
{
DX.DriverType = drivers[i];
break;
}
}

failure to create a DirectX device and swapchain

I am having issues retrieving a swapchain and device from directx. further info is in the code
void GXDX::StartUp(HWND* mainWindow,int w, int h)
{
//width and height are members of GXDX
width = w; //contains the width
height = h; //contains the height
this->mainWindow = mainWindow; // Is a handle to the main window. it is usually something
//like : unusual -735313406
ID3D10Texture2D *backBufferSurface;
DXGI_SWAP_CHAIN_DESC swapChainDesc;
swapChainDesc.BufferCount = 2;
swapChainDesc.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
swapChainDesc.BufferDesc.RefreshRate.Numerator = 60;
swapChainDesc.BufferDesc.RefreshRate.Denominator = 1;
swapChainDesc.BufferDesc.Width = width;
swapChainDesc.BufferDesc.Height = height;
swapChainDesc.SampleDesc.Count = 1;
swapChainDesc.SampleDesc.Quality = 0;
swapChainDesc.OutputWindow = *mainWindow;
swapChainDesc.Windowed = TRUE;
D3D10_DRIVER_TYPE driverType = D3D10_DRIVER_TYPE_HARDWARE;
HRESULT hr = D3D10CreateDeviceAndSwapChain(NULL,driverType,NULL,0,
D3D10_SDK_VERSION, &swapChainDesc,&swapChain,&dxDevice);
if(FAILED(hr))//Error is here
throw GXVideoException(L"Problems retrieving directX device");
}
Below is all the values given after the method has finished
- &dxDevice 0x00e74b04 ID3D10Device * *
- 0x00000000 ID3D10Device *
- IUnknown {...} IUnknown
__vfptr CXX0030: Error: expression cannot be evaluated
- &swapChain 0x00e74b08 IDXGISwapChain * *
- 0x00000000 IDXGISwapChain *
- IDXGIDeviceSubObject {...} IDXGIDeviceSubObject
- IDXGIObject {...} IDXGIObject
- IUnknown {...} IUnknown
__vfptr CXX0030: Error: expression cannot be evaluated
- &swapChainDesc 0x002df90c {BufferDesc={...} SampleDesc={...} BufferUsage=0xcccccccc ...} DXGI_SWAP_CHAIN_DESC *
- BufferDesc {Width=0x00000320 Height=0x00000258 RefreshRate={...} ...} DXGI_MODE_DESC
Width 800 unsigned int
Height 600 unsigned int
- RefreshRate {Numerator=60 Denominator=1 } DXGI_RATIONAL
Numerator 60 unsigned int
Denominator 1 unsigned int
Format DXGI_FORMAT_R8G8B8A8_UNORM DXGI_FORMAT
ScanlineOrdering -858993460 DXGI_MODE_SCANLINE_ORDER
Scaling -858993460 DXGI_MODE_SCALING
- SampleDesc {Count=0x00000001 Quality=0x00000000 } DXGI_SAMPLE_DESC
Count 1 unsigned int
Quality 0 unsigned int
BufferUsage 3435973836 unsigned int
BufferCount 2 unsigned int
- OutputWindow 0x008b08ca {unused=-665779669 } HWND__ *
unused -665779669 int
Windowed 1 int
SwapEffect -858993460 DXGI_SWAP_EFFECT
Flags 3435973836 unsigned int
driverType D3D10_DRIVER_TYPE_HARDWARE D3D10_DRIVER_TYPE
hr 0x887a0001 HRESULT
- this 0x00e74af0 {dxDevice=0x00000000 swapChain=0x00000000 } GXDX * const
- GXRenderer {running=true width=0x00000320 height=0x00000258 ...} GXRenderer
- __vfptr 0x013277dc const GXDX::`vftable' *
[0] 0x0132110e GXDX::Render(void) *
[0x1] 0x013211d6 GXDX::StartUp(struct HWND__ * *,int,int) *
[0x2] 0x01321041 GXDX::SetupScene(void) *
[0x3] 0x01321069 GXDX::DisplayScene(void) *
running true bool
width 0x00000320 int
height 0x00000258 int
- mainWindow 0x0132a214 struct HWND__ * GXRenderManager::mainWindow {unused=0x008b08ca } HWND__ *
unused 0x008b08ca int
- dxDevice 0x00000000 ID3D10Device *
+ IUnknown {...} IUnknown
- swapChain 0x00000000 IDXGISwapChain *
- IDXGIDeviceSubObject {...} IDXGIDeviceSubObject
- IDXGIObject {...} IDXGIObject
- IUnknown {...} IUnknown
__vfptr CXX0030: Error: expression cannot be evaluated
[EDIT]
Prior to Goz response, I checked out further debug detail and this is what was recieved
DXGI Error: IDXGIFactory::CreateSwapChain: SwapEffect is unknown.
which I am guessing I did not add the swapeffect attributes. I will do that and check it out
Silly me. earlier I mentioned in a comment that both books I was reading did not include a swapchain effect property. I knew something was off regardless of if I included the swap chain or not.
But I also notice that both books zeroed out the swap chain description. Making it more safe to leave out properties. So I added the following
SecureZeroMemory(&swapChainDesc, sizeof(swapChainDesc));
and everything worked. Set aside this, I should still add a swapchain effect property. But for reason's the book decided not to, i have not figured out yet.
The following code worked for me (I'm using Microsoft Visual Studio Express 2012 for Windows Desktop)
DXGI_SWAP_CHAIN_DESC swapChainDesc;
swapChainDesc.BufferDesc.Width = width;
swapChainDesc.BufferDesc.Height = height;
swapChainDesc.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
swapChainDesc.BufferDesc.RefreshRate.Numerator = 60;
swapChainDesc.BufferDesc.RefreshRate.Denominator = 1;
swapChainDesc.SampleDesc.Count = 1;
swapChainDesc.SampleDesc.Quality = 0;
swapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
swapChainDesc.BufferCount = 1;
swapChainDesc.OutputWindow = hWnd;
swapChainDesc.Windowed = true;
swapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
swapChainDesc.Flags = 0;
ID3D11Device *pDevice = 0;
ID3D11DeviceContext *pContext = 0;
IDXGISwapChain *pSwapChain = 0;
HRESULT result;
result = D3D11CreateDeviceAndSwapChain(NULL, driverType, NULL, D3D11_CREATE_DEVICE_SINGLETHREADED, featureLevel, totalFeatureLevel, D3D11_SDK_VERSION, &swapChainDesc, &pSwapChain, &pDevice, &currentFeatureLevel, &pContext);
So you can just try adding more attributes to the swapChainDesc.
Try not passing a pointer to your HWND through. You should pass an HWND as a non-pointer. Other than that I don't see anything massively wrong.
This is what DXErrorLookup says about your error:
HRESULT: 0x887a0001 (2289696769)
Name: DXGI_ERROR_INVALID_CALL
Description: The application has made an erroneous API call that it had enough
information to avoid. This error is intended to denote that the application should be
altered to avoid the error. Use of the debug version of the DXGI.DLL will provide run-
time debug output with further information.
Severity code: Failed
Facility Code: FACILITY_DXGI (2170)
Error Code: 0x0001 (1)
So have you considered using the debug version of DXGI to see what the error is?
Btw my working DX10 initialisation is as follows (Warning a LOT of code!):
HRESULT hr = S_OK;
// Wrong init params passed in.
if ( pParams->paramSize != sizeof( D3D10InitParams ) )
return false;
// Upgrade the initparams to the correct version
mInitParams = *(D3D10InitParams*)pParams;
// Create factory.
IDXGIFactory* pFactory = NULL;
if ( FAILED( CreateDXGIFactory( __uuidof( IDXGIFactory ), (void**)&pFactory ) ) )
{
return false;
}
if ( FAILED( pFactory->MakeWindowAssociation( mInitParams.hWnd, 0 ) ) )
{
return false;
}
HWND hTemp;
pFactory->GetWindowAssociation( &hTemp );
// Enumerate adapters.
unsigned int count = 0;
IDXGIAdapter * pAdapter;
std::vector<IDXGIAdapter*> vAdapters;
while( pFactory->EnumAdapters( count, &pAdapter ) != DXGI_ERROR_NOT_FOUND )
{
vAdapters.push_back( pAdapter );
count++;
}
unsigned int selectedAdapter = mInitParams.display;
if ( vAdapters.size() > 1 )
{
// Need to handle multiple available adapters.
}
// Release all other adapters.
count = 0;
unsigned int max = (unsigned int)vAdapters.size();
while( count < max )
{
if ( count != selectedAdapter )
{
vAdapters[count]->Release();
}
count++;
}
// Device should support all basic DX10 features.
// Caps does not support enough basic features.
//if ( !CheckCaps( &caps ) )
// return false;
// Create the D3D 10 device.
DXGI_MODE_DESC dxgiModeDesc;
dxgiModeDesc.Width = mInitParams.width;
dxgiModeDesc.Height = mInitParams.height;
dxgiModeDesc.RefreshRate.Numerator = (mInitParams.refreshRate == 0) ? 60 : mInitParams.refreshRate;
dxgiModeDesc.RefreshRate.Denominator = 1;
dxgiModeDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
dxgiModeDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_PROGRESSIVE;
dxgiModeDesc.Scaling = DXGI_MODE_SCALING_CENTERED;
DXGI_SAMPLE_DESC dxgiSampleDesc;
dxgiSampleDesc.Count = 1;
dxgiSampleDesc.Quality = 0;
//DXGI_USAGE dxgiUsage;
//dxgiUsage.
DXGI_SWAP_CHAIN_DESC dxgiSwapChainDesc;
dxgiSwapChainDesc.BufferDesc = dxgiModeDesc;
dxgiSwapChainDesc.SampleDesc = dxgiSampleDesc;
dxgiSwapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
dxgiSwapChainDesc.BufferCount = 2;
dxgiSwapChainDesc.OutputWindow = mInitParams.hWnd;
dxgiSwapChainDesc.Windowed = mInitParams.windowed;
dxgiSwapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
dxgiSwapChainDesc.Flags = 0;//DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH;
// Set the device as a debug device when compiled for debug.
#ifdef _DEBUG
unsigned int flags = D3D10_CREATE_DEVICE_DEBUG;
#else
unsigned int flags = 0;
#endif
mpAdapter = vAdapters[selectedAdapter];
// Create the device and swap chain.
if ( FAILED( D3D10CreateDeviceAndSwapChain( mpAdapter, D3D10_DRIVER_TYPE_HARDWARE, NULL, flags, D3D10_SDK_VERSION, &dxgiSwapChainDesc, &mpSwapChain, &mpDevice ) ) )
{
return false;
}
// Get the back buffer.
ID3D10Texture2D* pBuffer = NULL;
if ( FAILED( mpSwapChain->GetBuffer( 0, __uuidof( ID3D10Texture2D ), (void**)&pBuffer ) ) )
{
return false;
}
// Create the default render target view.
hr = mpDevice->CreateRenderTargetView( pBuffer, NULL, &mDefaultRenderTarget );
pBuffer->Release();
if ( FAILED( hr ) )
{
return false;
}
// Create depth stencil texture
D3D10_TEXTURE2D_DESC descDepth;
descDepth.Width = mInitParams.width;
descDepth.Height = mInitParams.height;
descDepth.MipLevels = 1;
descDepth.ArraySize = 1;
descDepth.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
descDepth.SampleDesc.Count = 1;
descDepth.SampleDesc.Quality = 0;
descDepth.Usage = D3D10_USAGE_DEFAULT;
descDepth.BindFlags = D3D10_BIND_DEPTH_STENCIL;
descDepth.CPUAccessFlags = 0;
descDepth.MiscFlags = 0;
if ( FAILED( mpDevice->CreateTexture2D( &descDepth, NULL, &mpDepthStencilTex ) ) )
{
return false;
}
// Create the depth stencil view
D3D10_DEPTH_STENCIL_VIEW_DESC descDSV;
descDSV.Format = descDepth.Format;
descDSV.ViewDimension = D3D10_DSV_DIMENSION_TEXTURE2D;
descDSV.Texture2D.MipSlice = 0;
if ( FAILED( mpDevice->CreateDepthStencilView( mpDepthStencilTex, &descDSV, &mDefaultDepthStencilTarget ) ) )
{
return false;
}
// Set the default render targets.
mpDevice->OMSetRenderTargets( 1, &mDefaultRenderTarget, mDefaultDepthStencilTarget );
mpEffectDevice = new D3D10EffectStateDevice( GetDevice() );
// Set the default render states.
SetupRenderStates();
// Set the default viewport.
D3D10_VIEWPORT d3d10ViewPort;
d3d10ViewPort.Width = mInitParams.width;
d3d10ViewPort.Height = mInitParams.height;
d3d10ViewPort.TopLeftX = 0;
d3d10ViewPort.TopLeftY = 0;
d3d10ViewPort.MinDepth = 0.0f;
d3d10ViewPort.MaxDepth = 1.0f;
GetDevice()->RSSetViewports( 1, &d3d10ViewPort );
I hope thats some help!