DirectX 11 scale Texture2D - C++

I would like to scale a texture I created from AcquireNextFrame.
Here is my code:
if (gRealTexture == nullptr) {
    D3D11_TEXTURE2D_DESC description;
    texture2D->GetDesc(&description);
    description.BindFlags = 0;
    description.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
    description.Usage = D3D11_USAGE_STAGING;
    description.MiscFlags = 0;

    hr = gDevice->CreateTexture2D(&description, NULL, &gRealTexture);
    if (FAILED(hr)) {
        if (gRealTexture) {
            gRealTexture->Release();
            gRealTexture = nullptr;
        }
        return NULL;
    }
}

gImmediateContext->CopyResource(gRealTexture, texture2D);

D3D11_MAPPED_SUBRESOURCE mapped;
hr = gImmediateContext->Map(gRealTexture, 0, D3D11_MAP_READ_WRITE, 0, &mapped);
if (FAILED(hr)) {
    gRealTexture->Release();
    gRealTexture = NULL;
    return NULL;
}

unsigned char *source = static_cast<unsigned char *>(mapped.pData); // Here I get the pixel buffer data
The problem is that the frame is in Full HD resolution (1920x1080). I would like to decrease the resolution (to 1280x720, for example) because I need to send source over the network, and I don't really need a Full HD image.
Is it possible to do this easily with DirectX before I get the pixel buffer?
Thanks

Create a smaller-resolution texture, then render texture2D onto it using a fullscreen quad (shrinking it to the new size). Make sure bilinear filtering is on.
Then copy and map the smaller texture. CopyResource alone can't do the resize: it requires that both resources have the same dimensions.
Some resources:
DX11 Helper Library
Render To Texture
DX11 Post Processing blur
The blur setup is the same as what I'm talking about, only instead of using a blur shader you use a simple texture shader.
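For illustration, here is a minimal sketch of that downscale pass. The shader pair gScaleVS/gScalePS and the sampler gLinearSampler are hypothetical names: a pass-through vertex shader that emits a fullscreen triangle from SV_VertexID and a pixel shader that just samples the source, plus a sampler created with D3D11_FILTER_MIN_MAG_MIP_LINEAR. It also assumes the duplication frame texture2D was created with D3D11_BIND_SHADER_RESOURCE (if not, copy it into an intermediate default-usage texture first):
// Create the 1280x720 render target once.
D3D11_TEXTURE2D_DESC desc = {};
desc.Width = 1280;
desc.Height = 720;
desc.MipLevels = 1;
desc.ArraySize = 1;
desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM; // match the duplication frame format
desc.SampleDesc.Count = 1;
desc.Usage = D3D11_USAGE_DEFAULT;
desc.BindFlags = D3D11_BIND_RENDER_TARGET;

ID3D11Texture2D *smallTex = nullptr;
gDevice->CreateTexture2D(&desc, nullptr, &smallTex);
ID3D11RenderTargetView *smallRTV = nullptr;
gDevice->CreateRenderTargetView(smallTex, nullptr, &smallRTV);

// A view over the full-size duplication frame so the shader can read it.
ID3D11ShaderResourceView *frameSRV = nullptr;
gDevice->CreateShaderResourceView(texture2D, nullptr, &frameSRV);

// Draw the full-size frame into the small target; the linear sampler
// performs the bilinear filtering during the shrink.
D3D11_VIEWPORT vp = { 0.0f, 0.0f, 1280.0f, 720.0f, 0.0f, 1.0f };
gImmediateContext->RSSetViewports(1, &vp);
gImmediateContext->OMSetRenderTargets(1, &smallRTV, nullptr);
gImmediateContext->VSSetShader(gScaleVS, nullptr, 0);
gImmediateContext->PSSetShader(gScalePS, nullptr, 0);
gImmediateContext->PSSetShaderResources(0, 1, &frameSRV);
gImmediateContext->PSSetSamplers(0, 1, &gLinearSampler);
gImmediateContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
gImmediateContext->Draw(3, 0);

// Now CopyResource smallTex into a 1280x720 staging texture (same desc but
// D3D11_USAGE_STAGING, BindFlags 0, CPU read access) and Map that instead.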

Related

Why does DirectXToolkit ruin my depth testing

I'm sure I'm just missing some simple step that I've been too blind to notice so far, but I cannot seem to get depth testing to work at all. This is with DirectX 11.
The code that should set it all up:
DXGI_SWAP_CHAIN_DESC swapDesc = { };
swapDesc.BufferDesc.Width = 0;
swapDesc.BufferDesc.Height = 0;
swapDesc.BufferDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
swapDesc.BufferDesc.RefreshRate.Numerator = 0;
swapDesc.BufferDesc.RefreshRate.Denominator = 1;
swapDesc.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;
swapDesc.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
swapDesc.SampleDesc.Count = 1;
swapDesc.SampleDesc.Quality = 0;
swapDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
swapDesc.BufferCount = 1;
swapDesc.OutputWindow = hwnd;
swapDesc.Windowed = TRUE;
swapDesc.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
swapDesc.Flags = 0;
UINT flg = 0;
#if MAGE_DEBUG
flg |= D3D11_CREATE_DEVICE_DEBUG;
#endif
GFX_THROW_INFO(D3D11CreateDeviceAndSwapChain(
    nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, flg,
    nullptr, 0, D3D11_SDK_VERSION,
    &swapDesc, &mSwap, &mDevice, nullptr, &mContext));
COMptr<ID3D11Resource> backBuffer;
GFX_THROW_INFO(mSwap->GetBuffer(0, __uuidof(ID3D11Resource), &backBuffer));
GFX_THROW_INFO(mDevice->CreateRenderTargetView(backBuffer.Get(), nullptr, &mTarget));
LOG_INFO("Setting depth stencil dimensions ({}, {})", width, height);
COMptr<ID3D11Texture2D> depthStencil;
D3D11_TEXTURE2D_DESC texDesc = { };
texDesc.Width = width;
texDesc.Height = height;
texDesc.MipLevels = 1;
texDesc.ArraySize = 1;
texDesc.Format = DXGI_FORMAT_D32_FLOAT;
texDesc.SampleDesc.Count = 1;
texDesc.SampleDesc.Quality = 0;
texDesc.Usage = D3D11_USAGE_DEFAULT;
texDesc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
GFX_THROW_INFO(mDevice->CreateTexture2D(&texDesc, nullptr, &depthStencil));
D3D11_DEPTH_STENCIL_DESC depth = { };
depth.DepthEnable = TRUE;
depth.DepthWriteMask = D3D11_DEPTH_WRITE_MASK_ALL;
depth.DepthFunc = D3D11_COMPARISON_LESS;
COMptr<ID3D11DepthStencilState> depthState;
GFX_THROW_INFO(mDevice->CreateDepthStencilState(&depth, &depthState));
D3D11_DEPTH_STENCIL_VIEW_DESC dsvDesc = { };
dsvDesc.Format = DXGI_FORMAT_D32_FLOAT;
dsvDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
dsvDesc.Texture2D.MipSlice = 0;
GFX_THROW_INFO(mDevice->CreateDepthStencilView(depthStencil.Get(), &dsvDesc, &mDepthStencilView));
mContext->OMSetDepthStencilState(depthState.Get(), 1);
mContext->OMSetRenderTargets(1, mTarget.GetAddressOf(), mDepthStencilView.Get());
LOG_INFO("Setting viewport dimensions ({}, {})", width, height);
D3D11_VIEWPORT vp;
vp.Width = (float) width;
vp.Height = (float) height;
vp.MinDepth = 0.0f;
vp.MaxDepth = 1.0f;
vp.TopLeftX = 0.0f;
vp.TopLeftY = 0.0f;
mContext->RSSetViewports(1, &vp);
And of course, before every frame I call the following:
mContext->ClearRenderTargetView(mTarget.Get(), color);
mContext->ClearDepthStencilView(mDepthStencilView.Get(), D3D11_CLEAR_DEPTH, 1.0f, 0);
But unfortunately, the result ends up being this (note that the Crysis nanosuit model is behind the goblin head). I believe this could potentially also be why the goblin model renders incorrectly even when alone, but I haven't figured that one out yet.
Example 1
And with just the goblin, looking from an angle
Example 2
If anyone can help me figure out why it's not working, I'd greatly appreciate it!
EDIT
After some more frustrating testing, I discovered the depth testing was broken because of some test text rendering I was doing with DirectX Tool Kit's SpriteBatch and SpriteFont classes. Has anyone come across this issue before? I don't really want/need the toolkit for anything other than text rendering and perhaps loading DDS textures, so I'm hoping that if I want to use those classes I don't need to drastically change my existing code.
DirectX Tool Kit does not 'capture/restore' state like the legacy D3DX9/D3DX10 sprite did. This was inefficient and relied on some hacky back-door functionality to capture the 'state block' for Direct3D 10+. In most cases, you are already going to set the bulk of the commonly used state to set up for the next draw call anyhow.
Instead, I have fully documented all state impacted by each class. You are expected to change all required state after the DirectX Tool Kit object renders. For example, SpriteBatch docs state:
SpriteBatch makes use of the following states:
BlendState
Constant buffer (Vertex Shader stage, slot 0)
DepthStencilState
Index buffer
Input layout
Pixel shader
Primitive topology
RasterizerState
SamplerState (Pixel Shader stage, slot 0)
Shader resources (Pixel Shader stage, slot 0)
Vertex buffer (slot 0)
Vertex shader
So in short, you just need to set the DepthStencilState to what you want to use after you call SpriteBatch::End.
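For example (a minimal sketch, using hypothetical mSpriteBatch and mFont objects for the HUD text):
mSpriteBatch->Begin();
mFont->DrawString(mSpriteBatch.get(), L"debug text", DirectX::XMFLOAT2(10.f, 10.f));
mSpriteBatch->End(); // leaves SpriteBatch's own depth-stencil state bound

// Restore the state your 3D pass relies on:
mContext->OMSetDepthStencilState(depthState.Get(), 1);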
As a general habit for state management, you should set all the state you rely on every frame. While in Direct3D 11 the 'last state' at the time you call Present is still there at the start of the next frame, this isn't true of DirectX 12. As such, you should make a habit of setting everything at the start of each new frame: the current render target, viewport, render states you expect to be present for your whole scene, etc.
For example, most "HUD" rendering is done last, so the state changes by SpriteBatch would normally be reset on the next frame's start--again, assuming you set up the required state at the start of the frame rather than assuming it remains unchanged over many frames.
TL;DR: Move this code to just after you clear the render target each frame:
mContext->OMSetDepthStencilState(depthState.Get(), 1);
mContext->OMSetRenderTargets(1, mTarget.GetAddressOf(), mDepthStencilView.Get());
D3D11_VIEWPORT vp = { 0.f, 0.f, float(width), float(height), D3D11_MIN_DEPTH, D3D11_MAX_DEPTH };
mContext->RSSetViewports(1, &vp);

DirectX - Nothing renders after enabling depth buffer

I was trying to implement a depth buffer in my DirectX 11.0 renderer, but I encountered a specific problem. I'm new to DirectX, so it might be a stupid question, but I can't fix it by myself. I checked many tutorials on this topic, and each shows how to do this more or less the same way.
I have two triangles in the scene. When I enable depth, everything disappears and I get only a blue screen (the background color).
To enable the depth buffer, I first created the depth stencil texture description and created the depth stencil buffer with its depth stencil view. Then I passed the DepthStencilView as the last parameter of OMSetRenderTargets. After that I created the depth stencil state.
D3D11_TEXTURE2D_DESC depthStencilTextureDesc;
depthStencilTextureDesc.Width = width;
depthStencilTextureDesc.Height = height;
depthStencilTextureDesc.MipLevels = 1;
depthStencilTextureDesc.ArraySize = 1;
depthStencilTextureDesc.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
depthStencilTextureDesc.SampleDesc.Count = 1;
depthStencilTextureDesc.SampleDesc.Quality = 0;
depthStencilTextureDesc.Usage = D3D11_USAGE_DEFAULT;
depthStencilTextureDesc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
depthStencilTextureDesc.CPUAccessFlags = 0;
depthStencilTextureDesc.MiscFlags = 0;
hr = Device->CreateTexture2D(&depthStencilTextureDesc, nullptr, DepthStencilBuffer.GetAddressOf());
if (FAILED(hr))
{
    Logger::Error("Error creating depth stencil buffer!");
    return false;
}

hr = Device->CreateDepthStencilView(DepthStencilBuffer.Get(), nullptr, DepthStencilView.GetAddressOf());
if (FAILED(hr))
{
    Logger::Error("Error creating depth stencil view!");
    return false;
}
Logger::Debug("Successfully created depth stencil buffer and view.");
DeviceContext->OMSetRenderTargets(1, RenderTargetView.GetAddressOf(), DepthStencilView.Get());
Logger::Debug("Binding render target output merge successfully.");
D3D11_DEPTH_STENCIL_DESC depthStencilDesc;
ZeroMemory(&depthStencilDesc, sizeof(D3D11_DEPTH_STENCIL_DESC));
depthStencilDesc.DepthEnable = true;
depthStencilDesc.DepthWriteMask = D3D11_DEPTH_WRITE_MASK_ALL;
depthStencilDesc.DepthFunc = D3D11_COMPARISON_LESS_EQUAL;
hr = Device->CreateDepthStencilState(&depthStencilDesc, DepthStencilState.GetAddressOf());
if (FAILED(hr))
{
    Logger::Error("Error creating depth stencil state!");
    return false;
}
Then I set the viewport depth with this code:
viewport.MinDepth = 0.0f;
viewport.MaxDepth = 1.0f;
Then I moved to my Render function and added clearing the depth stencil and setting the state, like this:
...
DeviceContext->ClearDepthStencilView(DepthStencilView.Get(), D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0f, 0);
...
DeviceContext->OMSetDepthStencilState(DepthStencilState.Get(), 0);
And... it doesn't work. If I change the last parameter of OMSetRenderTargets from DepthStencilView.Get() to nullptr, it works. So it seems like I did something wrong with the depth stencil, but I'm not sure what. I created a gist for this Renderer.cpp HERE. Please help me solve this, because I'm stuck on it and I don't know what to do.
When creating a Depth/Stencil View, make sure that the MSAA Sample Count and Quality settings are the same for both the Render Target View and the Depth Stencil View.
The DSV may need additional information when being created for an MSAA target. Here is an example of how my DSV is created (note that I am not using the Stencil Buffer and instead chose to get more precision on my depth buffer):
//Describe our Depth/Stencil Buffer
D3D11_TEXTURE2D_DESC depthStencilDesc;
depthStencilDesc.Width = activeDisplayMode.Width;
depthStencilDesc.Height = activeDisplayMode.Height;
depthStencilDesc.MipLevels = 1;
depthStencilDesc.ArraySize = 1;
depthStencilDesc.Format = DXGI_FORMAT_R32_TYPELESS;
depthStencilDesc.SampleDesc.Count = sampleLevel;
depthStencilDesc.SampleDesc.Quality = qualityLevel;
depthStencilDesc.Usage = D3D11_USAGE_DEFAULT;
depthStencilDesc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
depthStencilDesc.CPUAccessFlags = 0;
depthStencilDesc.MiscFlags = 0;
if (MSAAEnabled == true)
{
    // Need a DSVDesc to let it know to use MSAA
    D3D11_DEPTH_STENCIL_VIEW_DESC depthStencilViewDesc;
    ZeroMemory(&depthStencilViewDesc, sizeof(D3D11_DEPTH_STENCIL_VIEW_DESC));
    depthStencilViewDesc.Format = DXGI_FORMAT_D32_FLOAT;
    depthStencilViewDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2DMS;
    depthStencilViewDesc.Texture2D.MipSlice = 0;
    dev->CreateTexture2D(&depthStencilDesc, NULL, &depthStencilBuffer);
    dev->CreateDepthStencilView(depthStencilBuffer, &depthStencilViewDesc, &depthStencilView);
}
else
{
    // Don't need a DSVDesc
    dev->CreateTexture2D(&depthStencilDesc, NULL, &depthStencilBuffer);
    dev->CreateDepthStencilView(depthStencilBuffer, NULL, &depthStencilView);
}
To summarize what I found with GaleRazorwind's help: I fixed my problem by setting the depthStencilTextureDesc multisampling values to the same values as in BufferDesc.
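A minimal sketch of that fix (assuming SwapChain is the IDXGISwapChain the render target view came from):
DXGI_SWAP_CHAIN_DESC scDesc;
SwapChain->GetDesc(&scDesc);
// The depth texture's MSAA settings must match the back buffer's.
depthStencilTextureDesc.SampleDesc.Count = scDesc.SampleDesc.Count;
depthStencilTextureDesc.SampleDesc.Quality = scDesc.SampleDesc.Quality;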

Convert IMFSample* to ID3D11ShaderResourceView*

I am new to DirectX and I am trying to write a simple application that reads a video and displays it on a quad.
I read the video using Windows Media Foundation (IMFSourceReader), that sends me a callback when a sample is decoded (IMFSample).
I want to convert this IMFSample* to a ID3D11ShaderResourceView* in order to use it as a texture to draw my quad, however the conversion fails.
Here is what I do (I removed the non-relevant error checks):
HRESULT SourceReaderCB::OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex, DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample)
{
    ...
    DWORD NumBuffers = 0;
    hr = pSample->GetBufferCount(&NumBuffers);
    if (FAILED(hr) || NumBuffers < 1)
    {
        ...
    }

    IMFMediaBuffer* SourceMediaPtr = nullptr;
    hr = pSample->GetBufferByIndex(0, &SourceMediaPtr);
    if (FAILED(hr))
    {
        ...
    }

    ComPtr<IMFMediaBuffer> _pInputBuffer = SourceMediaPtr;
    ComPtr<IMF2DBuffer2> _pInputBuffer2D2;
    bool isVideoFrame = (_pInputBuffer.As(&_pInputBuffer2D2) == S_OK);
    if (isVideoFrame)
    {
        IMFDXGIBuffer* pDXGIBuffer = NULL;
        ID3D11Texture2D* pSurface = NULL;
        hr = _pInputBuffer->QueryInterface(__uuidof(IMFDXGIBuffer), (LPVOID*)&pDXGIBuffer);
        if (FAILED(hr))
        {
            SafeRelease(&SourceMediaPtr);
            goto done;
        }
        hr = pDXGIBuffer->GetResource(__uuidof(ID3D11Texture2D), (LPVOID*)&pSurface);
        if (FAILED(hr))
        {
            ...
        }
        if (pSurface)
        {
            D3D11_TEXTURE2D_DESC textureDesc;
            pSurface->GetDesc(&textureDesc);

            D3D11_SHADER_RESOURCE_VIEW_DESC shaderResourceViewDesc;
            shaderResourceViewDesc.Format = DXGI_FORMAT_R8_UNORM;
            shaderResourceViewDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
            shaderResourceViewDesc.Texture2D.MostDetailedMip = 0;
            shaderResourceViewDesc.Texture2D.MipLevels = 1;

            ID3D11ShaderResourceView* resourceView;
            hr = d3d11device->CreateShaderResourceView(pSurface, &shaderResourceViewDesc, &resourceView);
            if (FAILED(hr))
            {
                ... // CODE FAILS HERE
            }
            ...
        }
    }
}
My first issue is that I set shaderResourceViewDesc.Format to DXGI_FORMAT_R8_UNORM, which will probably just give me a red image (I will have to investigate this later).
The second and blocking issue I am facing is that the conversion of the ID3D11Texture2D to an ID3D11ShaderResourceView fails with the following error message:
ID3D11Device::CreateShaderResourceView: A ShaderResourceView cannot be created of a Resource that did not specify the D3D11_BIND_SHADER_RESOURCE BindFlag. [ STATE_CREATION ERROR #129: CREATESHADERRESOURCEVIEW_INVALIDRESOURCE]
I understand that a flag is missing at the creation of the texture, which prevents me from doing what I want, but as the data buffer is created by WMF, I am not sure what I am supposed to do to fix this issue.
Thanks for your help
I see your code, and I can say that your approach is wrong - no offense. Firstly, the video decoder creates a plain texture - in your situation a DirectX 11 texture - and it is a regular texture: it is not a shader resource, so it cannot be used in shader code. In my view, there are two ways to resolve your task:
Research "Walkthrough: Using MF to render video in a Direct3D app" - this link presents the approach from "Walkthrough: Using Microsoft Media Foundation for Windows Phone 8". From your code I see that you are trying to write a solution for Windows Store / UWP, and the Windows Phone code is workable there. This code needs MediaEnginePlayer - a helper class that wraps the MF APIs;
Find Windows-classic-samples on GitHub and look for DX11VideoRenderer there - this is the full code of a Media Foundation renderer with DirectX 11. It includes a very good example of using the DirectX 11 Video Processor, which blits the regular video texture from the decoder onto the rendering video texture of the swap chain:
2.1. Get the rendering texture from the swap chain:
// Get Backbuffer
hr = m_pSwapChain1->GetBuffer(0, __uuidof(ID3D11Texture2D), (void**)&pDXGIBackBuffer);
if (FAILED(hr))
{
    break;
}
2.2. Create the video processor output view from the rendering texture:
//
// Create Output View of Output Surfaces.
//
D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC OutputViewDesc;
ZeroMemory( &OutputViewDesc, sizeof( OutputViewDesc ) );
if (m_b3DVideo && m_bStereoEnabled)
{
    OutputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2DARRAY;
}
else
{
    OutputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D;
}
OutputViewDesc.Texture2D.MipSlice = 0;
OutputViewDesc.Texture2DArray.MipSlice = 0;
OutputViewDesc.Texture2DArray.FirstArraySlice = 0;
if (m_b3DVideo && 0 != m_vp3DOutput)
{
    OutputViewDesc.Texture2DArray.ArraySize = 2; // STEREO
}
QueryPerformanceCounter(&lpcStart);
hr = m_pDX11VideoDevice->CreateVideoProcessorOutputView(pDXGIBackBuffer, m_pVideoProcessorEnum, &OutputViewDesc, &pOutputView);
2.3. Create the video processor input view from the regular decoder video texture:
D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC InputLeftViewDesc;
ZeroMemory( &InputLeftViewDesc, sizeof( InputLeftViewDesc ) );
InputLeftViewDesc.FourCC = 0;
InputLeftViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
InputLeftViewDesc.Texture2D.MipSlice = 0;
InputLeftViewDesc.Texture2D.ArraySlice = dwLeftViewIndex;
hr = m_pDX11VideoDevice->CreateVideoProcessorInputView(pLeftTexture2D, m_pVideoProcessorEnum, &InputLeftViewDesc, &pLeftInputView);
if (FAILED(hr))
{
    break;
}
2.4. Blit the regular decoder video texture onto the rendering texture from the swap chain:
D3D11_VIDEO_PROCESSOR_STREAM StreamData;
ZeroMemory( &StreamData, sizeof( StreamData ) );
StreamData.Enable = TRUE;
StreamData.OutputIndex = 0;
StreamData.InputFrameOrField = 0;
StreamData.PastFrames = 0;
StreamData.FutureFrames = 0;
StreamData.ppPastSurfaces = NULL;
StreamData.ppFutureSurfaces = NULL;
StreamData.pInputSurface = pLeftInputView;
StreamData.ppPastSurfacesRight = NULL;
StreamData.ppFutureSurfacesRight = NULL;
if (m_b3DVideo && MFVideo3DSampleFormat_MultiView == m_vp3DOutput && pRightTexture2D)
{
    StreamData.pInputSurfaceRight = pRightInputView;
}
hr = pVideoContext->VideoProcessorBlt(m_pVideoProcessor, pOutputView, 0, 1, &StreamData );
if (FAILED(hr))
{
    break;
}
Yes, these are sections of complex code, and understanding them requires studying the whole DX11VideoRenderer project - that will take a large amount of time.
Regards,
Evgeny Pereguda
The debug output suggests that the texture is not compatible: it was created without the D3D11_BIND_SHADER_RESOURCE flag (specified in the BindFlags field of the D3D11_TEXTURE2D_DESC structure).
You are reading a texture already created by a Media Foundation primitive. In some cases you can alter the creation flags, but in the general case you need to create a compatible texture of your own, copy the data between the textures, and then call CreateShaderResourceView with your texture as the argument rather than the original one.
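A minimal sketch of that copy approach, reusing the names from the question. Note that decoder output is often one slice of a texture array, which is why the subresource index is queried from the IMFDXGIBuffer:
// Create a shader-readable texture matching the decoder surface.
D3D11_TEXTURE2D_DESC copyDesc;
pSurface->GetDesc(&copyDesc);
copyDesc.ArraySize = 1;
copyDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
copyDesc.Usage = D3D11_USAGE_DEFAULT;
copyDesc.CPUAccessFlags = 0;
copyDesc.MiscFlags = 0;

ID3D11Texture2D* copyTex = nullptr;
hr = d3d11device->CreateTexture2D(&copyDesc, nullptr, &copyTex);

// The decoded frame may live in an array slice; fetch its index.
UINT subresource = 0;
pDXGIBuffer->GetSubresourceIndex(&subresource);

ID3D11DeviceContext* ctx = nullptr;
d3d11device->GetImmediateContext(&ctx);
ctx->CopySubresourceRegion(copyTex, 0, 0, 0, 0, pSurface, subresource, nullptr);
ctx->Release();

// Create the view on the copy instead of the original decoder surface.
hr = d3d11device->CreateShaderResourceView(copyTex, &shaderResourceViewDesc, &resourceView);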

Direct3D 11 depth buffer result in black screen

I tried to implement the depth buffer in Direct3D 11, but it turns out not to be as easy as I thought. Here's my problem: whenever I call this function:
m_DeviceContext->OMSetRenderTargets(1, &m_RTV, m_DepthStencilView);
with the 3rd parameter as my depth stencil view, I see nothing in my window; everything is black. If I pass nullptr instead, all my geometry is rendered without any depth testing (like it was before I attempted to implement depth). Here's how I create my depth buffer and my depth stencil view:
D3D11_TEXTURE2D_DESC depthBufferDesc;
ZeroMemory(&depthBufferDesc, sizeof(D3D11_TEXTURE2D_DESC));
depthBufferDesc.Width = iwidth;
depthBufferDesc.Height = iheight;
depthBufferDesc.MipLevels = 1;
depthBufferDesc.ArraySize = 1;
depthBufferDesc.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
depthBufferDesc.SampleDesc.Count = 1;
depthBufferDesc.SampleDesc.Quality = 0;
depthBufferDesc.Usage = D3D11_USAGE_DEFAULT;
depthBufferDesc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
hr = m_Device->CreateTexture2D(&depthBufferDesc, nullptr, &m_DepthStencilBuffer);
hr = m_Device->CreateDepthStencilView(m_DepthStencilBuffer, nullptr, &m_DepthStencilView);
I searched for quite some time but didn't find the solution. I also checked the viewport, but that didn't help, and I also clear the view every frame.
Can you help me out please? I'd appreciate it :) Thanks.
You probably forgot to clear your depth buffer to 1.0 before trying to use it.
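That is, once per frame before drawing (a one-line sketch using the names from the question):
m_DeviceContext->ClearDepthStencilView(m_DepthStencilView, D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0f, 0);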

Unable to read depth buffer from Compute shader

I am unable to read the depth buffer from a compute shader.
I am using this in my HLSL code:
Texture2D<float4> gDepthTextures : register(t3);
// tried this.
//Texture2D<float> gDepthTextures : register(t3);
// and this.
//Texture2D<uint> gDepthTextures : register(t3);
// and this.
//Texture2D<uint4> gDepthTextures : register(t3);
And doing this to read the buffer.
outputTexture[dispatchThreadId.xy]=gDepthTextures.Load(int3(dispatchThreadId.xy,0));
And I am detaching the depth buffer from the render target:
ID3D11RenderTargetView *nullView[3]={NULL,NULL,NULL};
g_pImmediateContext->OMSetRenderTargets( 3, nullView, NULL );
Still, I am getting this error in the output:
D3D11 ERROR: ID3D11DeviceContext::Dispatch: The Shader Resource View dimension declared in the shader code (TEXTURE2D) does not match the view type bound to slot 3 of the Compute Shader unit (BUFFER). This mismatch is invalid if the shader actually uses the view (e.g. it is not skipped due to shader code branching). [ EXECUTION ERROR #354: DEVICE_DRAW_VIEW_DIMENSION_MISMATCH]
This is how I am creating the shader resource view:
// Create depth stencil texture
D3D11_TEXTURE2D_DESC descDepth;
ZeroMemory( &descDepth, sizeof(descDepth) );
descDepth.Width = width;
descDepth.Height = height;
descDepth.MipLevels = 1;
descDepth.ArraySize = 1;
descDepth.Format = DXGI_FORMAT_R32_TYPELESS;
descDepth.SampleDesc.Count = 1;
descDepth.SampleDesc.Quality = 0;
descDepth.Usage = D3D11_USAGE_DEFAULT;
descDepth.BindFlags = D3D11_BIND_DEPTH_STENCIL | D3D11_BIND_SHADER_RESOURCE;
descDepth.CPUAccessFlags = 0;
descDepth.MiscFlags = 0;
hr = g_pd3dDevice->CreateTexture2D( &descDepth, NULL, &g_pDepthStencil );
if( FAILED( hr ) )
    return hr;
// Create the depth stencil view
D3D11_DEPTH_STENCIL_VIEW_DESC descDSV;
ZeroMemory( &descDSV, sizeof(descDSV) );
descDSV.Format = DXGI_FORMAT_D32_FLOAT;
descDSV.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
descDSV.Texture2D.MipSlice = 0;
hr = g_pd3dDevice->CreateDepthStencilView( g_pDepthStencil, &descDSV, &g_pDepthStencilView );
if( FAILED( hr ) )
    return hr;
// Create depth shader resource view.
D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc;
ZeroMemory(&srvDesc,sizeof(D3D11_SHADER_RESOURCE_VIEW_DESC));
srvDesc.Format=DXGI_FORMAT_R32_UINT;
srvDesc.ViewDimension=D3D11_SRV_DIMENSION_TEXTURE2D;
srvDesc.Texture2D.MostDetailedMip=0;
srvDesc.Texture2D.MipLevels=1;
hr = g_pd3dDevice->CreateShaderResourceView(g_pDepthStencil, &srvDesc, &g_pDepthSRV);
if (FAILED(hr))
    return hr;
I have tried all the formats mentioned here, in combination with the HLSL texture formats float, float4, uint, and uint4, with no success. Any idea?
Replace DXGI_FORMAT_R32_UINT with DXGI_FORMAT_R32_FLOAT for your shader resource view: since you use R32_TYPELESS for the texture, you have a floating-point buffer.
Texture2D<float> gDepthTextures is then the declaration you need to load or sample the depth from later.
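A minimal sketch of the corrected view creation:
D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc;
ZeroMemory(&srvDesc, sizeof(D3D11_SHADER_RESOURCE_VIEW_DESC));
srvDesc.Format = DXGI_FORMAT_R32_FLOAT; // not R32_UINT: read the typeless bits as float
srvDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
srvDesc.Texture2D.MostDetailedMip = 0;
srvDesc.Texture2D.MipLevels = 1;
hr = g_pd3dDevice->CreateShaderResourceView(g_pDepthStencil, &srvDesc, &g_pDepthSRV);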
Also, it looks like your texture is not bound properly to your compute shader (since the runtime tells you a buffer is bound to that slot).
Make sure you have:
immediateContext->CSSetShaderResources(3, 1, &g_pDepthSRV);
called before your Dispatch (note the address-of: the call takes an array of view pointers).
As a side note, to debug these types of issues you can also call CSGetShaderResources (and its equivalents for the other stages) to check what's bound to your pipeline before the call.
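For example (a small debugging sketch; CSGetShaderResources adds a reference to the returned view, so release it when done):
ID3D11ShaderResourceView* boundSRV = nullptr;
g_pImmediateContext->CSGetShaderResources(3, 1, &boundSRV);
// Inspect boundSRV (e.g. compare it against g_pDepthSRV) to verify the binding.
if (boundSRV) boundSRV->Release();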