Depth Buffer on DirectX 10 not working - C++

So I'm trying to create a scene in DirectX 10, and for some reason my Z buffer (depth buffer) doesn't seem to be working. Here is the code where I set up the Z buffer:
// Initialize the description of the depth buffer.
ZeroMemory(&depthBufferDesc, sizeof(depthBufferDesc));
// Set up the description of the depth buffer.
depthBufferDesc.Width = _iScreenWidth;
depthBufferDesc.Height = _iScreenHeight;
depthBufferDesc.MipLevels = 1;
depthBufferDesc.ArraySize = 1;
depthBufferDesc.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
depthBufferDesc.SampleDesc.Count = 1;
depthBufferDesc.SampleDesc.Quality = 0;
depthBufferDesc.Usage = D3D10_USAGE_DEFAULT;
depthBufferDesc.BindFlags = D3D10_BIND_DEPTH_STENCIL;
depthBufferDesc.CPUAccessFlags = 0;
depthBufferDesc.MiscFlags = 0;
// Create the texture for the depth buffer using the filled out description.
if(FAILED(m_pDevice->CreateTexture2D(&depthBufferDesc, NULL, &m_pDepthStencilBuffer)))
{
return false;
}
// Initialize the description of the stencil state.
ZeroMemory(&depthStencilDesc, sizeof(depthStencilDesc));
// Set up the description of the stencil state.
depthStencilDesc.DepthEnable = true;
depthStencilDesc.DepthWriteMask = D3D10_DEPTH_WRITE_MASK_ALL;
depthStencilDesc.DepthFunc = D3D10_COMPARISON_LESS;
depthStencilDesc.StencilEnable = true;
depthStencilDesc.StencilReadMask = 0xFF;
depthStencilDesc.StencilWriteMask = 0xFF;
// Stencil operations if pixel is front-facing.
depthStencilDesc.FrontFace.StencilFailOp = D3D10_STENCIL_OP_KEEP;
depthStencilDesc.FrontFace.StencilDepthFailOp = D3D10_STENCIL_OP_INCR;
depthStencilDesc.FrontFace.StencilPassOp = D3D10_STENCIL_OP_KEEP;
depthStencilDesc.FrontFace.StencilFunc = D3D10_COMPARISON_ALWAYS;
// Stencil operations if pixel is back-facing.
depthStencilDesc.BackFace.StencilFailOp = D3D10_STENCIL_OP_KEEP;
depthStencilDesc.BackFace.StencilDepthFailOp = D3D10_STENCIL_OP_DECR;
depthStencilDesc.BackFace.StencilPassOp = D3D10_STENCIL_OP_KEEP;
depthStencilDesc.BackFace.StencilFunc = D3D10_COMPARISON_ALWAYS;
// Create the depth stencil state.
if(FAILED(m_pDevice->CreateDepthStencilState(&depthStencilDesc, &m_pDepthStencilState)))
{
return false;
}
// Set the depth stencil state on the D3D device.
m_pDevice->OMSetDepthStencilState(m_pDepthStencilState, 1);
// Initialize the depth stencil view.
ZeroMemory(&depthStencilViewDesc, sizeof(depthStencilViewDesc));
// Set up the depth stencil view description.
depthStencilViewDesc.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
depthStencilViewDesc.ViewDimension = D3D10_DSV_DIMENSION_TEXTURE2D;
depthStencilViewDesc.Texture2D.MipSlice = 0;
// Create the depth stencil view.
if(FAILED(m_pDevice->CreateDepthStencilView(m_pDepthStencilBuffer, &depthStencilViewDesc, &m_pDepthStencilView)))
{
return false;
}
// Bind the render target view and depth stencil buffer to the output render pipeline.
m_pDevice->OMSetRenderTargets(1, &m_pRenderTargetView, m_pDepthStencilView);
// Setup the raster description which will determine how and what polygons will be drawn.
rasterDesc.AntialiasedLineEnable = true;
rasterDesc.CullMode = D3D10_CULL_BACK;
rasterDesc.DepthBias = 0;
rasterDesc.DepthBiasClamp = 0.0f;
rasterDesc.DepthClipEnable = true;
rasterDesc.FillMode = D3D10_FILL_SOLID;
rasterDesc.FrontCounterClockwise = false;
rasterDesc.MultisampleEnable = false;
rasterDesc.ScissorEnable = false;
rasterDesc.SlopeScaledDepthBias = 0.0f;
// Uncomment to turn off back-face culling
//rasterDesc.CullMode = D3D10_CULL_NONE;
//rasterDesc.FillMode = D3D10_FILL_WIREFRAME;
// Create the rasterizer state from the description we just filled out.
if(FAILED(m_pDevice->CreateRasterizerState(&rasterDesc, &m_pRasterState)))
{
return false;
}
And here is what it looks like in my scene:
http://imgur.com/UsaURcR
I used this resource (http://msdn.microsoft.com/en-us/library/windows/desktop/bb205074(v=vs.85).aspx) to step through the setup, but I'm now quite confused as to why it's not working.
Any help would be VERY much appreciated, thanks!

1> Try changing your depthBufferDesc and depthStencilViewDesc format to:
DXGI_FORMAT_D32_FLOAT
2> Depending on your model's winding order, you can try setting rasterDesc.FrontCounterClockwise to true:
rasterDesc.FrontCounterClockwise = true;
3> Please check that your viewport has set:
ViewPort.MinDepth = 0.0f;
ViewPort.MaxDepth = 1.0f;
4> You also have to check the near clipping plane in your camera. The value has to be greater than zero.
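For points 3 and 4, a minimal sketch of what that setup usually looks like in D3D10 (using D3DX math, as in the question's code; the variable names are just placeholders):
D3D10_VIEWPORT vp;
ZeroMemory(&vp, sizeof(vp));
vp.TopLeftX = 0;
vp.TopLeftY = 0;
vp.Width = _iScreenWidth;
vp.Height = _iScreenHeight;
vp.MinDepth = 0.0f; // the depth range must span 0..1
vp.MaxDepth = 1.0f;
m_pDevice->RSSetViewports(1, &vp);
// The near clipping plane must be greater than zero; a near plane of 0 breaks the depth test.
D3DXMATRIX projectionMatrix;
D3DXMatrixPerspectiveFovLH(&projectionMatrix,
    (float)D3DX_PI / 4.0f, // field of view
    (float)_iScreenWidth / (float)_iScreenHeight, // aspect ratio
    0.1f, // near plane (not 0!)
    1000.0f); // far plane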

Related

dx11 Rendering to texture shows only clear color

I'm trying to create a game view for my game engine using the ImGui framework; however, when I try to render to a texture, only the clear color is displayed on the screen.
My plan:
Create a render target texture (I think I did this part right (I think)).
Write my scene into the target texture (I presume this is where I failed; only the clear color is displayed, my test triangle is not).
Create a shader resource view (I tested this and the next step by loading an image from disk, and there is no problem there).
Display it with ImGui::Image().
With the render texture, only the clear color and the ImGui editor elements are displayed on the screen; when I switch back to normal rendering, my test triangle appears on the screen.
My Render Texture Creation
D3D11_TEXTURE2D_DESC textureDesc;
D3D11_RENDER_TARGET_VIEW_DESC renderTargetViewDesc;
D3D11_SHADER_RESOURCE_VIEW_DESC shaderResourceViewDesc;
ZeroMemory(&textureDesc, sizeof(textureDesc));
textureDesc.Width = 1920;
textureDesc.Height = 1080;
textureDesc.MipLevels = 1;
textureDesc.ArraySize = 1;
textureDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
textureDesc.SampleDesc.Count = 1;
textureDesc.Usage = D3D11_USAGE_DEFAULT;
textureDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
textureDesc.CPUAccessFlags = 0;
textureDesc.MiscFlags = 0;
textureDesc.SampleDesc.Count = 1;
textureDesc.SampleDesc.Quality = 0;
renderTargetViewDesc.Format = textureDesc.Format;
renderTargetViewDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
renderTargetViewDesc.Texture2D.MipSlice = 0;
DLE_Graphics::CurrentGraphics->pDevice->CreateTexture2D(&textureDesc, NULL, pTargetTexture.GetAddressOf());
shaderResourceViewDesc.Format = textureDesc.Format;
shaderResourceViewDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
shaderResourceViewDesc.Texture2D.MostDetailedMip = 0;
shaderResourceViewDesc.Texture2D.MipLevels = 1;
DLE_Graphics::CurrentGraphics->pDevice->CreateShaderResourceView(this->pTargetTexture.Get(), &shaderResourceViewDesc, this->pResourceView.GetAddressOf());
// (re)-create the render target view
DLE_Graphics::CurrentGraphics->pSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), reinterpret_cast<void**>(pTargetTexture.GetAddressOf()));
DLE_Graphics::CurrentGraphics->pDevice->CreateRenderTargetView(pTargetTexture.Get(), &renderTargetViewDesc, pRenderTargetView.GetAddressOf());
// create the depth and stencil buffer
D3D11_TEXTURE2D_DESC dsd;
pTargetTexture->GetDesc(&dsd);
dsd.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
dsd.Usage = D3D11_USAGE_DEFAULT;
dsd.BindFlags = D3D11_BIND_DEPTH_STENCIL;
DLE_Graphics::CurrentGraphics->pDevice->CreateTexture2D(&dsd, NULL, pDepthTexture.GetAddressOf());
DLE_Graphics::CurrentGraphics->pDevice->CreateDepthStencilView(pDepthTexture.Get(), NULL, pDepthView.GetAddressOf());
// activate the depth and stencil buffer
DLE_Graphics::CurrentGraphics->pContext->OMSetRenderTargets(1, pRenderTargetView.GetAddressOf(), pDepthView.Get());
My Rendering Code
const FLOAT clr[] = { 0.0f, 0.5f, 0.5f, 0.0f };
pContext->ClearRenderTargetView(pRenderTexture->GetRenderView().Get(), clr);
pContext->ClearDepthStencilView(pRenderTexture->GetDepthView().Get(), D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 0u, 0u);
pContext->OMSetRenderTargets(1u, pRenderTexture->GetRenderView().GetAddressOf(), pRenderTexture->GetDepthView().Get());
pContext->DrawIndexed(size, 0u, 0u);
pSwapChain->Present(0u, 0u);
If anyone is curious, this is my swap chain description:
DXGI_SWAP_CHAIN_DESC description = {};
description.BufferDesc.Width = 1920;
description.BufferDesc.Height = 1080;
description.BufferDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
description.BufferDesc.RefreshRate.Numerator = 60;
description.BufferDesc.RefreshRate.Denominator = 1;
description.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;
description.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
description.SampleDesc.Count = 1;
description.SampleDesc.Quality = 0;
description.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT ;
description.BufferCount = 1;
description.OutputWindow = hWnd;
description.Windowed = TRUE;
description.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
description.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH;
When I use my render texture, create a resource view, and use ImGui::Image to show it, there is no image, not even the clear color. If I use my image from the disk for ImGui::Image, it displays the image. I don't know what I'm doing wrong here.
Here's my ImGui code:
ImGui::Begin("Spectrum Observer");
ImVec2 pos = ImGui::GetCursorScreenPos();
//ImGui::Image(texture->GetResource(), ImVec2(512, 512)); // my image from the disk
ImGui::Image(pGraphics->pRenderTexture->GetResourceView().Get(), ImVec2(512, 512)); // render texture
ImGui::End();
ImGui::Render();
ImGui_ImplDX11_RenderDrawData(ImGui::GetDrawData());
I appreciate every input possible, thank you!
I managed to solve the triangle rendering problem by changing the depth clear value in ClearDepthStencilView() from 0u to 1u:
const FLOAT clr[] = { 0.0f, 0.5f, 0.5f, 0.0f };
pContext->ClearRenderTargetView(pRenderTexture->GetRenderView().Get(), clr);
pContext->ClearDepthStencilView(pRenderTexture->GetDepthView().Get(), D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1u, 0u);
pContext->OMSetRenderTargets(1u, pRenderTexture->GetRenderView().GetAddressOf(), pRenderTexture->GetDepthView().Get());
pContext->DrawIndexed(size, 0u, 0u);
pSwapChain->Present(0u, 0u);
Apparently I was clearing the depth buffer to 0, which marks every pixel as the nearest possible depth, so with a LESS depth comparison everything drawn afterwards fails the z-test.

DirectX11 Depth values only 0 and 1

I'm porting my OpenGL renderer to DirectX. Almost everything works fine, but I have a problem with depth: everything in 3D renders as flat. I've checked the Visual Studio Graphics Analyzer, and the depth buffer and state are bound. If I inspect the depth/stencil texture resource, there are only the values 0.f (where something is on screen) and 1.f (where nothing is rendered). The OpenGL renderer, checked with gDEBugger, shows values from 0.f to 1.f and works. I've also checked error codes; everything returns S_OK. What could I be doing wrong?
Here is my DepthStencilState creation:
D3D11_DEPTH_STENCIL_DESC dsDescEn;
ZeroMemory(&dsDescEn, sizeof(D3D11_DEPTH_STENCIL_DESC));
dsDescEn.DepthEnable = TRUE;
dsDescEn.DepthWriteMask = D3D11_DEPTH_WRITE_MASK_ALL;
dsDescEn.DepthFunc = D3D11_COMPARISON_LESS;
dsDescEn.StencilEnable = TRUE;
dsDescEn.StencilReadMask = 0xFF;
dsDescEn.StencilWriteMask = 0xFF;
dsDescEn.FrontFace.StencilFailOp = D3D11_STENCIL_OP_KEEP;
dsDescEn.FrontFace.StencilDepthFailOp = D3D11_STENCIL_OP_INCR;
dsDescEn.FrontFace.StencilPassOp = D3D11_STENCIL_OP_KEEP;
dsDescEn.FrontFace.StencilFunc = D3D11_COMPARISON_ALWAYS;
dsDescEn.BackFace.StencilFailOp = D3D11_STENCIL_OP_KEEP;
dsDescEn.BackFace.StencilDepthFailOp = D3D11_STENCIL_OP_DECR;
dsDescEn.BackFace.StencilPassOp = D3D11_STENCIL_OP_KEEP;
dsDescEn.BackFace.StencilFunc = D3D11_COMPARISON_ALWAYS;
DirectX11Device::getDevice().getDevicePtr()->CreateDepthStencilState(&dsDescEn, &m_dSStateEn);
I'm binding the state via OMSetDepthStencilState.
D3D11_TEXTURE2D_DESC depthStencilDesc;
depthStencilDesc.Width = width;
depthStencilDesc.Height = height;
depthStencilDesc.MipLevels = 1;
depthStencilDesc.ArraySize = 1;
depthStencilDesc.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
depthStencilDesc.SampleDesc.Count = 1;
depthStencilDesc.SampleDesc.Quality = 0;
depthStencilDesc.Usage = D3D11_USAGE_DEFAULT;
depthStencilDesc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
depthStencilDesc.CPUAccessFlags = 0;
depthStencilDesc.MiscFlags = 0;
D3D11_DEPTH_STENCIL_VIEW_DESC depthStencilViewDesc;
ZeroMemory(&depthStencilViewDesc, sizeof(D3D11_DEPTH_STENCIL_VIEW_DESC));
depthStencilViewDesc.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
depthStencilViewDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
depthStencilViewDesc.Texture2D.MipSlice = 0;
DirectX11Device::getDevice().getDevicePtr()->CreateTexture2D(&depthStencilDesc, NULL, &m_stencilDepthBuff);
DirectX11Device::getDevice().getDevicePtr()->CreateDepthStencilView(m_stencilDepthBuff, &depthStencilViewDesc, &m_stencilDepthView);
I've found a solution. The problem was the matrix multiplication. I moved it from C++ into the shader, and now it works without problems.
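The answer doesn't say what exactly was wrong on the CPU side, but a common cause of this symptom is the row-major vs. column-major mismatch: DirectXMath builds row-major matrices, while HLSL constant buffers default to column-major packing. A minimal sketch of the usual CPU-side alternative (transposing before upload), assuming DirectXMath and a hypothetical PerObjectCB constant buffer:
#include <d3d11.h>
#include <DirectXMath.h>
using namespace DirectX;

struct PerObjectCB // hypothetical; must match the cbuffer layout in the vertex shader
{
    XMFLOAT4X4 worldViewProj;
};

void UpdatePerObjectCB(ID3D11DeviceContext* context, ID3D11Buffer* cb,
                       FXMMATRIX world, CXMMATRIX view, CXMMATRIX proj)
{
    // Build world * view * projection on the CPU...
    XMMATRIX wvp = XMMatrixMultiply(XMMatrixMultiply(world, view), proj);
    PerObjectCB data;
    // ...and transpose it so HLSL's default column-major packing reads it correctly.
    XMStoreFloat4x4(&data.worldViewProj, XMMatrixTranspose(wvp));
    context->UpdateSubresource(cb, 0, nullptr, &data, 0, 0);
}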

Rendering to texture - ClearRenderTargetView() works, but no objects are rendered to the texture (rendering to screen works fine)

I'm trying to render the scene to a texture, which should then be displayed in a corner of the screen.
I thought I could do that this way:
Render the scene (my Engine::render() method that sets the shaders and makes the draw calls) - works OK.
Change the render target to the texture.
Render the scene again - does not work. The context->ClearRenderTargetView(texture->getRenderTargetView(), { 1.0f, 0.0f, 0.0f, 1.0f }) does set my texture to a red color (for the scene in step 1 I use a different color), but no objects are rendered onto it.
Change the render target back to the original.
Render the scene for the last time, with a rectangle in the corner that uses the texture I rendered in step 3 - works OK. I see the scene, and the little rectangle in the corner too. The problem is that it's just red (something went wrong with the rendering in step 3, I guess).
The result (there should be an "image in image" instead of the red rectangle):
The code for steps 2. - 4.:
context->OMSetRenderTargets(1, &textureRenderTargetView, depthStencilView);
float bg[4] = { 1.0f, 0.0f, 0.0f, 1.0f };
context->ClearRenderTargetView(textureRenderTargetView, bg); //backgroundColor - red, green, blue, alpha
render();
context->OMSetRenderTargets(1, &myRenderTargetView, depthStencilView); //bind render target back to previous value (not to texture)
The render() method does not change (it works in step 1, so why doesn't it work when I render to the texture?) and ends with swapChain->Present(0, 0).
I know that ClearRenderTargetView affects my texture (without it, it doesn't change color to red). But the rest of the rendering either does not output to it, or there's another problem.
Did I miss something?
I create the texture, shader resource view and render target for it based on this tutorial (maybe there is an error in my D3D11_TEXTURE2D_DESC?):
D3D11_TEXTURE2D_DESC textureDesc;
D3D11_RENDER_TARGET_VIEW_DESC renderTargetViewDesc;
D3D11_SHADER_RESOURCE_VIEW_DESC shaderResourceViewDesc;
//1. create render target
ZeroMemory(&textureDesc, sizeof(textureDesc));
//setup the texture description
//we will need to have this texture bound as a render target AND a shader resource
textureDesc.Width = size.getX();
textureDesc.Height = size.getY();
textureDesc.MipLevels = 1;
textureDesc.ArraySize = 1;
textureDesc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
textureDesc.SampleDesc.Count = 1;
textureDesc.Usage = D3D11_USAGE_DEFAULT;
textureDesc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
textureDesc.CPUAccessFlags = 0;
textureDesc.MiscFlags = 0;
//create the texture
device->CreateTexture2D(&textureDesc, NULL, &textureRenderTarget);
//2. create render target view
//setup the description of the render target view.
renderTargetViewDesc.Format = textureDesc.Format;
renderTargetViewDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
renderTargetViewDesc.Texture2D.MipSlice = 0;
//create the render target view
device->CreateRenderTargetView(textureRenderTarget, &renderTargetViewDesc, &textureRenderTargetView);
//3. create shader resource view
//setup the description of the shader resource view.
shaderResourceViewDesc.Format = textureDesc.Format;
shaderResourceViewDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
shaderResourceViewDesc.Texture2D.MostDetailedMip = 0;
shaderResourceViewDesc.Texture2D.MipLevels = 1;
//create the shader resource view.
device->CreateShaderResourceView(textureRenderTarget, &shaderResourceViewDesc, &texture);
The depth buffer:
D3D11_TEXTURE2D_DESC descDepth;
ZeroMemory(&descDepth, sizeof(descDepth));
descDepth.Width = width;
descDepth.Height = height;
descDepth.MipLevels = 1;
descDepth.ArraySize = 1;
descDepth.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
descDepth.SampleDesc.Count = sampleCount;
descDepth.SampleDesc.Quality = maxQualityLevel;
descDepth.Usage = D3D11_USAGE_DEFAULT;
descDepth.BindFlags = D3D11_BIND_DEPTH_STENCIL;
descDepth.CPUAccessFlags = 0;
descDepth.MiscFlags = 0;
And here goes the swap chain:
DXGI_SWAP_CHAIN_DESC sd;
ZeroMemory(&sd, sizeof(sd));
sd.BufferCount = 1;
sd.BufferDesc.Width = width;
sd.BufferDesc.Height = height;
sd.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
sd.BufferDesc.RefreshRate.Numerator = numerator; //60
sd.BufferDesc.RefreshRate.Denominator = denominator; //1
sd.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
sd.OutputWindow = *hwnd;
sd.SampleDesc.Count = sampleCount; //1 (and 0 for quality) to turn off multisampling
sd.SampleDesc.Quality = maxQualityLevel;
sd.Windowed = fullScreen ? FALSE : TRUE;
sd.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH; //allow full-screen switching
// Set the scan line ordering and scaling to unspecified.
sd.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
sd.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;
// Discard the back buffer contents after presenting.
sd.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
I create the default render target view that way:
//create a render target view
ID3D11Texture2D* pBackBuffer = NULL;
result = swapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (LPVOID*)&pBackBuffer);
ERROR_HANDLE(SUCCEEDED(result), L"The swapChain->GetBuffer() failed.", MOD_GRAPHIC);
//Create the render target view with the back buffer pointer.
result = device->CreateRenderTargetView(pBackBuffer, NULL, &myRenderTargetView);
After some debugging, as #Gnietschow suggested, I have found an error:
D3D11 ERROR: ID3D11DeviceContext::OMSetRenderTargets:
The RenderTargetView at slot 0 is not compatable with the
DepthStencilView. DepthStencilViews may only be used with
RenderTargetViews if the effective dimensions of the Views are equal,
as well as the Resource types, multisample count, and multisample
quality.
The RenderTargetView at slot 0 has (w:1680,h:1050,as:1), while the Resource is a Texture2D with (mc:1,mq:0).
The DepthStencilView has (w:1680,h:1050,as:1), while the Resource is a Texture2D with (mc:8,mq:16).
So basically, my render target (texture) was not using anti-aliasing while my back buffer and depth buffer were.
I had to change SampleDesc.Count to 1 and SampleDesc.Quality to 0 in both DXGI_SWAP_CHAIN_DESC and D3D11_TEXTURE2D_DESC to match the values of the texture I render to. In other words, I had to turn off anti-aliasing when rendering to the texture.
I wonder why rendering to a texture does not seem to support anti-aliasing. When I set SampleDesc.Count and SampleDesc.Quality to my standard values (8 and 16, which work fine on my GPU when rendering the scene) for my texture render target, device->CreateTexture2D(...) fails with "invalid parameter" (even when I use those same values everywhere).
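The "invalid parameter" is most likely the Quality value: the valid range is 0 to NumQualityLevels - 1, and the number of levels is per-format and per-sample-count, so a quality of 16 that happens to work for the swap chain's format is not automatically valid for DXGI_FORMAT_R32G32B32A32_FLOAT. A hedged sketch of how that is usually queried (D3D11, reusing textureDesc from above):
UINT sampleCount = 8;
UINT numQualityLevels = 0;
HRESULT hr = device->CheckMultisampleQualityLevels(DXGI_FORMAT_R32G32B32A32_FLOAT, sampleCount, &numQualityLevels);
if (SUCCEEDED(hr) && numQualityLevels > 0)
{
    // Valid qualities are 0 .. numQualityLevels - 1 for this format/count combination.
    textureDesc.SampleDesc.Count = sampleCount;
    textureDesc.SampleDesc.Quality = numQualityLevels - 1;
}
else
{
    // This format does not support the requested sample count; fall back to no MSAA.
    textureDesc.SampleDesc.Count = 1;
    textureDesc.SampleDesc.Quality = 0;
}
Note also that a multisampled render target needs a Texture2DMS shader resource view, and to sample it like a normal texture you generally have to ResolveSubresource it into a non-MSAA texture first.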

RasterTek Drawing 2D DirectX11 - I am having trouble with what I think is the Z buffer

I am having trouble with the Z buffer. When I implement the 2D texture tutorial from RasterTek, the image is drawn, but when I move the camera past a certain Z position, the 2D texture just does not show.
Here is my code for the depth stencil:
D3DXMatrixOrthoLH(&m_orthoMatrix, (float)screenWidth, (float)screenHeight, screenNear, screenDepth);
// Clear the second depth stencil state before setting the parameters.
ZeroMemory(&depthDisabledStencilDesc, sizeof(depthDisabledStencilDesc));
// Now create a second depth stencil state which turns off the Z buffer for 2D rendering. The only difference is
// that DepthEnable is set to false, all other parameters are the same as the other depth stencil state.
depthDisabledStencilDesc.DepthEnable = false;
depthDisabledStencilDesc.DepthWriteMask = D3D11_DEPTH_WRITE_MASK_ALL;
depthDisabledStencilDesc.DepthFunc = D3D11_COMPARISON_LESS;
depthDisabledStencilDesc.StencilEnable = true;
depthDisabledStencilDesc.StencilReadMask = 0xFF;
depthDisabledStencilDesc.StencilWriteMask = 0xFF;
depthDisabledStencilDesc.FrontFace.StencilFailOp = D3D11_STENCIL_OP_KEEP;
depthDisabledStencilDesc.FrontFace.StencilDepthFailOp = D3D11_STENCIL_OP_INCR;
depthDisabledStencilDesc.FrontFace.StencilPassOp = D3D11_STENCIL_OP_KEEP;
depthDisabledStencilDesc.FrontFace.StencilFunc = D3D11_COMPARISON_ALWAYS;
depthDisabledStencilDesc.BackFace.StencilFailOp = D3D11_STENCIL_OP_KEEP;
depthDisabledStencilDesc.BackFace.StencilDepthFailOp = D3D11_STENCIL_OP_DECR;
depthDisabledStencilDesc.BackFace.StencilPassOp = D3D11_STENCIL_OP_KEEP;
depthDisabledStencilDesc.BackFace.StencilFunc = D3D11_COMPARISON_ALWAYS;
Here's the code for toggling the Z buffer:
void D3DClass::TurnZBufferOn()
{
m_deviceContext->OMSetDepthStencilState(m_depthStencilState, 1);
return;
}
void D3DClass::TurnZBufferOff()
{
m_deviceContext->OMSetDepthStencilState(m_depthDisabledStencilState, 1);
return;
}
I have been stuck on this for so long, please help!
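For reference, a minimal sketch of the order the RasterTek tutorials call these helpers in (method names other than TurnZBufferOn/TurnZBufferOff and m_orthoMatrix are placeholders): the 3D pass runs with the Z buffer on and the perspective matrix, and the 2D quads are drawn afterwards with the Z buffer off and the orthographic matrix, so they are never rejected by the depth test regardless of the camera's Z position.
m_D3D->BeginScene(0.0f, 0.0f, 0.0f, 1.0f); // clear color and depth

// 3D pass: depth testing enabled, perspective projection.
RenderScene(viewMatrix, projectionMatrix);

// 2D pass: depth testing disabled, orthographic projection.
m_D3D->TurnZBufferOff();
RenderBitmap(viewMatrix, m_orthoMatrix);
m_D3D->TurnZBufferOn();

m_D3D->EndScene(); // present the frame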

Depth stencil not working - DirectX 10 C++

I have a DirectX10 + C++ problem.
Basically, we're at the early stages of rendering, and for some reason our depth stencil seems to be failing to handle our model correctly. Here is everything we are doing:
Load shader, model and texture
Initialize DirectX
Draw
The model, shader, and texture all load and work correctly; however (as shown in the screenshot below), the depth stencil is clearly not doing its job and the shader is being applied in the wrong places. I have also included our initialization method in case you need it to figure this out. We believe we have tried almost everything, but knowing our luck we have probably missed one important line of code ^.^
We also saw that someone else had the same problem; however, their fix didn't work for us (their problem was that they had set the near clipping plane to 0.0, but ours is not 0.0, so that is not the problem).
Thanks in advance!
Problem screenshot
void GraphicsDeviceDirectX::InitGraphicsDevice(HWND hWnd)
{
DXGI_SWAP_CHAIN_DESC scd; // create a struct to hold various swap chain information
ZeroMemory(&scd, sizeof(DXGI_SWAP_CHAIN_DESC)); // clear out the struct for use
scd.BufferCount = 2; // create two buffers, one for the front, one for the back
scd.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; // use 32-bit color
scd.BufferDesc.Height = 600;
scd.BufferDesc.Width = 600;
scd.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT; // tell how the chain is to be used
scd.OutputWindow = hWnd; // set the window to be used by Direct3D
scd.SampleDesc.Count = 1; // set the level of multi-sampling
scd.SampleDesc.Quality = 0; // set the quality of multi-sampling
scd.Windowed = true; // set to windowed or full-screen mode
//set scan line ordering and scaling
scd.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
scd.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;
//discard back buffer contents
scd.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
//don't set advanced flags
scd.Flags = 0;
// create a device class and swap chain class using the information in the scd struct
if(FAILED(D3D10CreateDeviceAndSwapChain(NULL,
D3D10_DRIVER_TYPE_HARDWARE,
NULL,
D3D10_CREATE_DEVICE_DEBUG,
D3D10_SDK_VERSION,
&scd,
&swapchain,
&device)))
{
throw EngineException("Error creating graphics device");
}
//Push graphics device to Persistent Object Manager
//PerObjMan::Push(device);
//Push swap chain to Persistent Object Manager
PerObjMan::Push(swapchain);
// get the address of the back buffer and use it to create the render target
ID3D10Texture2D* pBackBuffer;
swapchain->GetBuffer(0, __uuidof(ID3D10Texture2D), (LPVOID*)&pBackBuffer);
device->CreateRenderTargetView(pBackBuffer, NULL, &rtv);
/*D3D10_TEXTURE2D_DESC descBack;
pBackBuffer->GetDesc(&descBack);*/
pBackBuffer->Release();
pBackBuffer = NULL;
//Push render target view to Persistent Object Manager
PerObjMan::Push(rtv);
ID3D10Texture2D* pDepthStencil = NULL;
D3D10_TEXTURE2D_DESC descDepth;
ZeroMemory(&descDepth, sizeof(descDepth));
descDepth.Width = 600;
descDepth.Height = 600;
descDepth.MipLevels = 1;
descDepth.ArraySize = 1;
descDepth.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
descDepth.SampleDesc.Count = 1;
descDepth.SampleDesc.Quality = 0;
descDepth.Usage = D3D10_USAGE_DEFAULT;
descDepth.BindFlags = D3D10_BIND_DEPTH_STENCIL;
descDepth.CPUAccessFlags = 0;
descDepth.MiscFlags = 0;
HRESULT hr;
hr = GetGraphicsDevice()->CreateTexture2D( &descDepth, NULL, &pDepthStencil );
if(FAILED(hr))
throw EngineException("FAIL");
PerObjMan::Push(pDepthStencil);
D3D10_DEPTH_STENCIL_DESC dsDesc;
ZeroMemory(&dsDesc, sizeof(dsDesc));
// Depth test parameters
dsDesc.DepthEnable = true;
dsDesc.DepthWriteMask = D3D10_DEPTH_WRITE_MASK::D3D10_DEPTH_WRITE_MASK_ALL;
dsDesc.DepthFunc = D3D10_COMPARISON_FUNC::D3D10_COMPARISON_LESS;
// Stencil test parameters
dsDesc.StencilEnable = false;
dsDesc.StencilReadMask = 0xFF;
dsDesc.StencilWriteMask = 0xFF;
// Stencil operations if pixel is front-facing.
dsDesc.FrontFace.StencilFailOp = D3D10_STENCIL_OP_KEEP;
dsDesc.FrontFace.StencilDepthFailOp = D3D10_STENCIL_OP_INCR;
dsDesc.FrontFace.StencilPassOp = D3D10_STENCIL_OP_KEEP;
dsDesc.FrontFace.StencilFunc = D3D10_COMPARISON_ALWAYS;
// Stencil operations if pixel is back-facing.
dsDesc.BackFace.StencilFailOp = D3D10_STENCIL_OP_KEEP;
dsDesc.BackFace.StencilDepthFailOp = D3D10_STENCIL_OP_DECR;
dsDesc.BackFace.StencilPassOp = D3D10_STENCIL_OP_KEEP;
dsDesc.BackFace.StencilFunc = D3D10_COMPARISON_ALWAYS;
// Create depth stencil state
hr = device->CreateDepthStencilState(&dsDesc, &dss);
if(FAILED(hr))
throw EngineException("FAIL");
// Bind depth stencil state
device->OMSetDepthStencilState(dss, 1);
PerObjMan::Push(dss);
D3D10_DEPTH_STENCIL_VIEW_DESC descDSV;
ZeroMemory(&descDSV, sizeof(descDSV));
descDSV.Format = descDepth.Format;
descDSV.ViewDimension = D3D10_DSV_DIMENSION::D3D10_DSV_DIMENSION_TEXTURE2D;
descDSV.Texture2D.MipSlice = 0;
// Create the depth stencil view
hr = device->CreateDepthStencilView( pDepthStencil, // Depth stencil texture
&descDSV, // Depth stencil desc
&dsv ); // [out] Depth stencil view
if(FAILED(hr))
throw EngineException("FAIL");
PerObjMan::Push(dsv);
// Bind the depth stencil view
device->OMSetRenderTargets( 1, // One rendertarget view
&rtv, // Render target view, created earlier
dsv); // Depth stencil view for the render target
D3D10_VIEWPORT viewport; // create a struct to hold the viewport data
ZeroMemory(&viewport, sizeof(D3D10_VIEWPORT)); // clear out the struct for use
GameToImplement::GameInfo::Info info = GameToImplement::GameInfo::GetGameInfo();
RECT rect;
int width = 0;
int height = 0;
if(GetClientRect(hWnd, &rect))
{
width = rect.right - rect.left;
height = rect.bottom - rect.top;
}
else
{
throw EngineException("");
}
viewport.TopLeftX = 0; // set the left to 0
viewport.TopLeftY = 0; // set the top to 0
viewport.Width = 600; // set the width to the window's width
viewport.Height = 600; // set the height to the window's height
viewport.MinDepth = 0.0f;
viewport.MaxDepth = 1.0f;
device->RSSetViewports(1, &viewport); // set the viewport
}
I fixed it, thanks to catflier's nod in the right direction. Turns out I was actually releasing the rasterizer state too early for the depth stencil to be used.
I'll leave this answer here for anyone who has the same problem.
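For anyone hitting the same thing, a minimal sketch of the lifetime pattern implied by that fix (D3D10; EngineException is from the code above, rasterDesc is assumed to be a filled D3D10_RASTERIZER_DESC like the one in the first question, and the rest are placeholders): create and bind the rasterizer state at initialization, keep the reference for the lifetime of the renderer, and release it only at shutdown.
ID3D10RasterizerState* rasterState = NULL;
HRESULT hr = device->CreateRasterizerState(&rasterDesc, &rasterState);
if (FAILED(hr))
    throw EngineException("Error creating rasterizer state");
device->RSSetState(rasterState); // bind it; keep the pointer alive while it is in use

// ... render frames ...

// At shutdown, unbind first, then release.
device->RSSetState(NULL);
rasterState->Release();
rasterState = NULL;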