Displaying FPS in someone else's game - C++

Good afternoon, everyone. I want to display an FPS counter in someone else's game. I inject into the game process; in this case the DirectX 11 application is a demo made with Unity.
I start the demo and set windowed mode at 800x600, then I load my DLL into it. Everything is fine: the frame counter is drawn.
If I switch to full-screen mode (Alt+Enter), the counter disappears,
that is, it is no longer drawn on the screen. If I go back to windowed mode, it is displayed normally again.
If I run the demo immediately in full-screen mode at a resolution equal to the screen resolution (for me that is 1440x900), the counter
is displayed normally.
What could be the reason? I have the impression that there are two textures
into which rendering takes place, and which of them is displayed
depends on the resolution. Is that right?
Here's the swap chain data after the Present() hook:
DXGI_SWAP_CHAIN_DESC swapChainDesc = {};
hr = swap->GetDesc(&swapChainDesc);
swapChainDesc.BufferDesc.Width = 800
swapChainDesc.BufferDesc.Height = 600
swapChainDesc.BufferDesc.RefreshRate.Numerator = 0
swapChainDesc.BufferDesc.RefreshRate.Denominator = 1
swapChainDesc.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM (28)
swapChainDesc.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED (0)
swapChainDesc.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED (0)
swapChainDesc.SampleDesc.Count = 2
swapChainDesc.SampleDesc.Quality = 0
swapChainDesc.BufferUsage = 48 (DXGI_USAGE_RENDER_TARGET_OUTPUT | DXGI_USAGE_SHADER_INPUT)
swapChainDesc.BufferCount = 1
swapChainDesc.OutputWindow = 0x000000000006087c
swapChainDesc.Windowed = 1
swapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_DISCARD (0)
swapChainDesc.Flags = 2 (DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH)
I suspect that this is affected by the parameter swapChainDesc.SampleDesc.Count = 2.
If so, what should I do? How do I display the text?
Is multisample anti-aliasing (MSAA) affecting this or not?
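For context, here is a minimal sketch of the kind of hooked Present that produced the dump above; the detour plumbing is omitted, and oPresent is a hypothetical pointer to the original function saved by the hook:
typedef HRESULT(__stdcall* Present_t)(IDXGISwapChain*, UINT, UINT);
Present_t oPresent = nullptr; //filled in by the hooking library

HRESULT __stdcall hkPresent(IDXGISwapChain* swap, UINT syncInterval, UINT flags)
{
    DXGI_SWAP_CHAIN_DESC desc = {};
    if (SUCCEEDED(swap->GetDesc(&desc)))
    {
        //SampleDesc.Count = 2 means the back buffer is multisampled, so a
        //render-target view created without matching MSAA settings will fail.
        //...draw the FPS counter here...
    }
    return oPresent(swap, syncInterval, flags);
}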
Screenshots (not reproduced here): in the window (800x600) the counter is visible; in full-screen mode (800x600) it is not; in full-screen mode (1440x900, the screen resolution) it is visible.

Related

Why does DirectXToolkit ruin my depth testing

I'm sure I'm just missing some simple step that I've been too blind to notice so far, but I cannot seem to get depth testing to work at all. This is with DirectX 11.
The code that should set it all up:
DXGI_SWAP_CHAIN_DESC swapDesc = { };
swapDesc.BufferDesc.Width = 0;
swapDesc.BufferDesc.Height = 0;
swapDesc.BufferDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
swapDesc.BufferDesc.RefreshRate.Numerator = 0;
swapDesc.BufferDesc.RefreshRate.Denominator = 1;
swapDesc.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;
swapDesc.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
swapDesc.SampleDesc.Count = 1;
swapDesc.SampleDesc.Quality = 0;
swapDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
swapDesc.BufferCount = 1;
swapDesc.OutputWindow = hwnd;
swapDesc.Windowed = TRUE;
swapDesc.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
swapDesc.Flags = 0;
UINT flg = 0;
#if MAGE_DEBUG
flg |= D3D11_CREATE_DEVICE_DEBUG;
#endif
GFX_THROW_INFO(D3D11CreateDeviceAndSwapChain(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr,
flg,
nullptr, 0,
D3D11_SDK_VERSION, &swapDesc, &mSwap, &mDevice, nullptr,
&mContext));
COMptr<ID3D11Resource> backBuffer;
GFX_THROW_INFO(mSwap->GetBuffer(0, __uuidof(ID3D11Resource), &backBuffer));
GFX_THROW_INFO(mDevice->CreateRenderTargetView(backBuffer.Get(), nullptr, &mTarget));
LOG_INFO("Setting depth stencil dimensions ({}, {})", width, height);
COMptr<ID3D11Texture2D> depthStencil;
D3D11_TEXTURE2D_DESC texDesc = { };
texDesc.Width = width;
texDesc.Height = height;
texDesc.MipLevels = 1;
texDesc.ArraySize = 1;
texDesc.Format = DXGI_FORMAT_D32_FLOAT;
texDesc.SampleDesc.Count = 1;
texDesc.SampleDesc.Quality = 0;
texDesc.Usage = D3D11_USAGE_DEFAULT;
texDesc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
GFX_THROW_INFO(mDevice->CreateTexture2D(&texDesc, nullptr, &depthStencil));
D3D11_DEPTH_STENCIL_DESC depth = { };
depth.DepthEnable = TRUE;
depth.DepthWriteMask = D3D11_DEPTH_WRITE_MASK_ALL;
depth.DepthFunc = D3D11_COMPARISON_LESS;
COMptr<ID3D11DepthStencilState> depthState;
GFX_THROW_INFO(mDevice->CreateDepthStencilState(&depth, &depthState));
D3D11_DEPTH_STENCIL_VIEW_DESC dsvDesc = { };
dsvDesc.Format = DXGI_FORMAT_D32_FLOAT;
dsvDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
dsvDesc.Texture2D.MipSlice = 0;
GFX_THROW_INFO(mDevice->CreateDepthStencilView(depthStencil.Get(), &dsvDesc, &mDepthStencilView));
mContext->OMSetDepthStencilState(depthState.Get(), 1);
mContext->OMSetRenderTargets(1, mTarget.GetAddressOf(), mDepthStencilView.Get());
LOG_INFO("Setting viewport dimensions ({}, {})", width, height);
D3D11_VIEWPORT vp;
vp.Width = (float) width;
vp.Height = (float) height;
vp.MinDepth = 0.0f;
vp.MaxDepth = 1.0f;
vp.TopLeftX = 0.0f;
vp.TopLeftY = 0.0f;
mContext->RSSetViewports(1, &vp);
And of course, before every frame I call the following:
mContext->ClearRenderTargetView(mTarget.Get(), color);
mContext->ClearDepthStencilView(mDepthStencilView.Get(), D3D11_CLEAR_DEPTH, 1.0f, 0);
But unfortunately, the result ends up being this (note that the Crysis nanosuit model is behind the goblin head). I believe this could also be why the goblin model renders incorrectly even when alone, but I haven't figured that one out yet.
Example 1 (screenshot)
And with just the goblin, looking from an angle:
Example 2 (screenshot)
If anyone can help me figure out why it's not working, I'd greatly appreciate it!
EDIT
After some more frustrating testing, I discovered the depth testing was broken because of some test text rendering I was doing with DirectX Tool Kit's SpriteBatch and SpriteFont classes. Has anyone come across this issue before? I don't really want/need the toolkit for anything other than text rendering and perhaps loading DDS textures, so I'm hoping I can use those classes without drastically changing my existing code?
DirectX Tool Kit does not 'capture/restore' state like the legacy D3DX9/D3DX10 sprite did. This was inefficient and relied on some hacky back-door functionality to capture the 'state block' for Direct3D 10+. In most cases, you are already going to set the bulk of the commonly used state to set up for the next draw call anyhow.
Instead, I have fully documented all state impacted by each class. You are expected to change all required state after the DirectX Tool Kit object renders. For example, SpriteBatch docs state:
SpriteBatch makes use of the following states:
BlendState
Constant buffer (Vertex Shader stage, slot 0)
DepthStencilState
Index buffer
Input layout
Pixel shader
Primitive topology
RasterizerState
SamplerState (Pixel Shader stage, slot 0)
Shader resources (Pixel Shader stage, slot 0)
Vertex buffer (slot 0)
Vertex shader
So in short, you just need to set the DepthStencilState to what you want to use after you call SpriteBatch::End.
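For example, a minimal sketch (spriteBatch, spriteFont, and the string are stand-ins for whatever HUD text you draw):
spriteBatch->Begin();
spriteFont->DrawString(spriteBatch.get(), L"FPS: 60", DirectX::XMFLOAT2(10.f, 10.f));
spriteBatch->End();
// SpriteBatch::End changed the depth-stencil state (among the other states listed above),
// so restore the state your 3D scene relies on before the next draw:
mContext->OMSetDepthStencilState(depthState.Get(), 1);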
As a general habit for state management, you should set all the state you rely on every frame. While in Direct3D 11 the 'last state' at the time you call Present is still there at the start of the next frame, this isn't true of DirectX 12. As such, make a habit of setting everything at the start of each new frame: the current render target, the viewport, the render states you expect to be in place for your whole scene, etc.
For example, most "HUD" rendering is done last, so the state changes by SpriteBatch would normally be reset on the next frame's start--again, assuming you set up the required state at the start of the frame rather than assuming it remains unchanged over many frames.
TL;DR: Move this code to just after you clear the render target each frame:
mContext->OMSetDepthStencilState(depthState.Get(), 1);
mContext->OMSetRenderTargets(1, mTarget.GetAddressOf(), mDepthStencilView.Get());
D3D11_VIEWPORT vp = { 0.f, 0.f, float(width), float(height), D3D11_MIN_DEPTH, D3D11_MAX_DEPTH };
mContext->RSSetViewports(1, &vp);

Direct3D 12 windowed mode forces vsync

I am writing a simple Direct3D 12 application to prepare for the release of Vulkan, and it works as expected in all regards but one: running in a bordered window restricts the framerate to 60fps, even with vsync disabled. What puzzles me: the same program in a fullscreen window runs at nearly 4000fps.
Using a dirty self-made profiler, I found that the hangup occurs in this section of my code, which waits until the last frame completes before starting to work on the next one.
if (m_fence->GetCompletedValue() < endFenceValue)
{
    result = m_fence->SetEventOnCompletion(endFenceValue, m_fenceEvent);
    if (result != S_OK) return false;
    WaitForSingleObject(m_fenceEvent, INFINITE); //Program stalls here
}
//m_fence is a pointer to an ID3D12Fence object
//endFenceValue is an unsigned long long
//m_fenceEvent is a HANDLE
The code used to present the rendered frame is ordinary:
if (m_vsync)
{
    result = m_swapChain->Present(1, 0);
    if (result != S_OK) return 0;
}
else
{
    result = m_swapChain->Present(0, 0);
    if (result != S_OK) return 0;
}
//Increase the fence value
result = m_commandQueue->Signal(m_fence, m_fenceValue);
if (result != S_OK) return 0;
return m_fenceValue++;
//m_swapChain is a pointer to an IDXGISwapChain3 object
//m_commandQueue is a pointer to an ID3D12CommandQueue object
//m_fenceValue is an unsigned long long
Note: the first block of code uses the return value from the above function as endFenceValue.
The swap chain I am using is set up like so:
swapChainDesc.BufferCount = 2; //Double buffered
swapChainDesc.BufferDesc.Width = width; //Set width
swapChainDesc.BufferDesc.Height = height; //Set height
swapChainDesc.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; //32-bit back buffers
swapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT; //Set the back buffers to be used as render targets
swapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_FLIP_DISCARD; //Throw out old back buffer contents after getting a new frame
swapChainDesc.OutputWindow = window;
swapChainDesc.Windowed = !fullscreen;
//Auto-detect the refresh rate
swapChainDesc.BufferDesc.RefreshRate.Numerator = 0;
swapChainDesc.BufferDesc.RefreshRate.Denominator = 0;
//No multisampling for now
swapChainDesc.SampleDesc.Count = 1;
swapChainDesc.SampleDesc.Quality = 0;
//Set the scan line ordering and scaling to unspecified
swapChainDesc.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
swapChainDesc.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;
//Allow to switch between windowed and fullscreen modes
//Also changes the monitor resolution to match the width and height of the window in fullscreen mode
swapChainDesc.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH;
For those interested, I am using SDL to create the window, but writing my own WinMain did nothing to fix the problem. I have also tried inspecting my vsync settings in the nVidia Control Panel, exiting f.lux (but not uninstalling it), and changing my performance settings in System Properties.
Can anyone provide an explanation or solution for this?
The refresh-rate cap for windowed swap chains was removed in Windows 10 build 10586.
Update your Windows and it should solve itself.
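For reference on newer builds: with a flip-model swap chain, uncapped windowed presentation is opt-in via the tearing flag. A minimal sketch, assuming a Windows 10 Anniversary Update SDK or later; factory stands in for your DXGI factory:
BOOL allowTearing = FALSE;
IDXGIFactory5* factory5 = nullptr;
if (SUCCEEDED(factory->QueryInterface(IID_PPV_ARGS(&factory5))))
{
    factory5->CheckFeatureSupport(DXGI_FEATURE_PRESENT_ALLOW_TEARING,
        &allowTearing, sizeof(allowTearing));
    factory5->Release();
}
//Create the swap chain with the tearing flag set...
swapChainDesc.Flags |= allowTearing ? DXGI_SWAP_CHAIN_FLAG_ALLOW_TEARING : 0;
//...and pass the matching present flag when vsync is off
result = m_swapChain->Present(0, allowTearing ? DXGI_PRESENT_ALLOW_TEARING : 0);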
According to nVidia Developer Zone, this is actually expected behavior. Currently, the only way to have uncapped framerates is with a fullscreen window.

Direct3D 11 depth buffer results in black screen

I tried to implement the depth buffer in Direct3D 11, but it turns out to not be as easy as I thought. Here's my problem: whenever I call this function:
m_DeviceContext->OMSetRenderTargets(1, &m_RTV, m_DepthStencilView);
with my depth stencil view as the 3rd parameter, I see nothing in my window; everything is black. If I pass nullptr instead, all my geometry is rendered, but without any depth testing (like it was before I attempted to implement depth). Here's how I create my depth buffer and my depth stencil view:
D3D11_TEXTURE2D_DESC depthBufferDesc;
ZeroMemory(&depthBufferDesc, sizeof(D3D11_TEXTURE2D_DESC));
depthBufferDesc.Width = iwidth;
depthBufferDesc.Height = iheight;
depthBufferDesc.MipLevels = 1;
depthBufferDesc.ArraySize = 1;
depthBufferDesc.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
depthBufferDesc.SampleDesc.Count = 1;
depthBufferDesc.SampleDesc.Quality = 0;
depthBufferDesc.Usage = D3D11_USAGE_DEFAULT;
depthBufferDesc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
hr = m_Device->CreateTexture2D(&depthBufferDesc, nullptr, &m_DepthStencilBuffer);
hr = m_Device->CreateDepthStencilView(m_DepthStencilBuffer, nullptr, &m_DepthStencilView);
I searched for quite some time but didn't find the solution. I also checked the viewport, but that didn't fix it, and I clear the render target view every frame.
Can you help me out, please? I'd appreciate it :) Thanks.
You probably forgot to clear your depth buffer to 1.0 before trying to use it.
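In code, using the names from the question, a minimal sketch to run at the start of every frame:
//With the default D3D11_COMPARISON_LESS depth test, an uncleared (all-zero)
//depth buffer rejects every pixel; clear it to the far plane first.
m_DeviceContext->ClearDepthStencilView(m_DepthStencilView,
    D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0f, 0);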

`E_FAIL` when creating DirectX 10 Device and Swap Chain - _com_error

I am working through some simple DX tutorials and have hit an early snag. I am working on both an old laptop and a new PC, so I'm using d3d10_1.lib, which lets me use a 9.x feature set. The PC, however, supports everything up to DX11, so nothing should be a problem there.
So here's the function where it fails:
bool DirectX9Renderer::Initialise(HWND* handle)
{
    //window handle
    hWnd = handle;
    //get window dimensions
    RECT rc;
    GetClientRect(*hWnd, &rc);
    UINT width = rc.right - rc.left;
    UINT height = rc.bottom - rc.top;
    DXGI_SWAP_CHAIN_DESC swapChainDesc;
    ZeroMemory(&swapChainDesc, sizeof(swapChainDesc));
    //set buffer dimensions and format
    swapChainDesc.BufferCount = 2;
    swapChainDesc.BufferDesc.Width = width;
    swapChainDesc.BufferDesc.Height = height;
    swapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
    swapChainDesc.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
    //set refresh rate
    swapChainDesc.BufferDesc.RefreshRate.Numerator = 60;
    swapChainDesc.BufferDesc.RefreshRate.Denominator = 1;
    //sampling settings
    swapChainDesc.SampleDesc.Quality = 0;
    swapChainDesc.SampleDesc.Count = 1;
    //output window handle
    swapChainDesc.OutputWindow = *hWnd;
    swapChainDesc.Windowed = true;
    HRESULT result = D3D10CreateDeviceAndSwapChain1( // this is line 57
        NULL,
        D3D10_DRIVER_TYPE_HARDWARE,
        NULL,
        D3D10_CREATE_DEVICE_SINGLETHREADED | D3D10_CREATE_DEVICE_DEBUG,
        D3D10_FEATURE_LEVEL_9_1,
        D3D10_1_SDK_VERSION,
        &swapChainDesc,
        &pSwapChain,
        &pD3DDevice
    );
    if (FAILED(result))
    {
        return FatalError("D3D device creation failed");
    }
    // there's more stuff after this, but I don't get that far
}
So the call to D3D10CreateDeviceAndSwapChain1 fails with the less-than-helpful error code E_FAIL.
There is a line in the Debug output too:
First-chance exception at 0x770f56c4 in TileTest.exe: Microsoft C++ exception: _com_error at memory location 0x00b6e8d4.
I have tried using D3D10_DRIVER_TYPE_REFERENCE and different D3D10_FEATURE_LEVEL_xx values, but it doesn't seem to work.
I think the problem had to do with the D3D10_CREATE_DEVICE_FLAG values I passed in. I changed D3D10_CREATE_DEVICE_SINGLETHREADED | D3D10_CREATE_DEVICE_DEBUG to 0 and it now works. (The debug layer has to be installed on the machine for D3D10_CREATE_DEVICE_DEBUG to succeed, which would explain the E_FAIL.)
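A hedged sketch of a more defensive pattern, assuming the same variables as the question: request the debug layer only in debug builds, and retry without it if device creation fails:
UINT flags = D3D10_CREATE_DEVICE_SINGLETHREADED;
#if defined(_DEBUG)
flags |= D3D10_CREATE_DEVICE_DEBUG; //requires the SDK debug layer to be installed
#endif
HRESULT result = D3D10CreateDeviceAndSwapChain1(NULL, D3D10_DRIVER_TYPE_HARDWARE, NULL,
    flags, D3D10_FEATURE_LEVEL_9_1, D3D10_1_SDK_VERSION,
    &swapChainDesc, &pSwapChain, &pD3DDevice);
if (result == E_FAIL && (flags & D3D10_CREATE_DEVICE_DEBUG))
{
    //Debug layer unavailable; retry without it
    flags &= ~D3D10_CREATE_DEVICE_DEBUG;
    result = D3D10CreateDeviceAndSwapChain1(NULL, D3D10_DRIVER_TYPE_HARDWARE, NULL,
        flags, D3D10_FEATURE_LEVEL_9_1, D3D10_1_SDK_VERSION,
        &swapChainDesc, &pSwapChain, &pD3DDevice);
}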
I tried to create the device inside a VMware virtual machine. It failed (the device stayed NULL) until I changed the requested feature level from D3D10_FEATURE_LEVEL_10_1 to D3D10_FEATURE_LEVEL_9_3. I've heard this also helps on other PCs with real hardware.
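Along the same lines, a minimal sketch (not from the original answer) that walks down the feature levels until one succeeds; trim the list to the levels your app actually supports:
const D3D10_FEATURE_LEVEL1 levels[] = {
    D3D10_FEATURE_LEVEL_10_1,
    D3D10_FEATURE_LEVEL_10_0,
    D3D10_FEATURE_LEVEL_9_3,
    D3D10_FEATURE_LEVEL_9_1,
};
HRESULT result = E_FAIL;
for (D3D10_FEATURE_LEVEL1 level : levels)
{
    result = D3D10CreateDeviceAndSwapChain1(NULL, D3D10_DRIVER_TYPE_HARDWARE, NULL,
        0, level, D3D10_1_SDK_VERSION, &swapChainDesc, &pSwapChain, &pD3DDevice);
    if (SUCCEEDED(result))
        break; //device created at this feature level
}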

D3D11: How to draw GDI text to a DXGI surface? (Without D2D)

I need some help with drawing text to a texture with GDI and D3D11. I tried using D2D/DirectWrite, but it supports just D3D10 and not D3D11 as I need. Everything I tried has failed so far...
Now I want to use GDI methods to write into the texture.
So I created a texture with these params:
Usage = D3D11_USAGE_DEFAULT;
Format = DXGI_FORMAT_B8G8R8A8_UNORM;
BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
CPUAccessFlags = 0;
MiscFlags = D3D11_RESOURCE_MISC_GDI_COMPATIBLE;
Then I created a normal RenderTargetView from this texture, as Microsoft says here: http://msdn.microsoft.com/en-us/library/ff476203%28v=vs.85%29.aspx
Next step: get the DXGI interface:
m_pTexFSText->QueryInterface(__uuidof(IDXGISurface1), (void **)(&m_pDXGISurface));
In the Render function I do just this:
m_pDeviceContext->OMSetRenderTargets(1, &m_pTextRenderTarget, NULL);
HDC hDc = NULL;
if (FAILED(m_pDXGISurface->GetDC(TRUE, &hDc)))
    return E_FAIL;
COLORREF bla = SetPixel(hDc, 1, 1, RGB(255, 255, 255));
bool hmm = TextOutA(hDc, 10, 10, "LALALA!", 7);
if (FAILED(m_pDXGISurface->ReleaseDC(NULL)))
    return E_FAIL;
The problem is that the texture is still empty after the GDI drawing (also tested with PIX).
Everything works and there are no error messages.
I hope somebody can explain how this works.
Thanks, Stefan
EDIT: I also tried GetDC(FALSE, &hDc) (according to the documentation): same result -> nothing.
I actually fought with this problem a lot during the last week, but I've got it all working! Here is a list of things you should know/do to make it all work:
Check the surface requirements for a GetDC method to work here: http://msdn.microsoft.com/en-us/library/windows/desktop/ff471345(v=vs.85).aspx
Keep the following in mind when using this method:
• You must create the surface by using the D3D11_RESOURCE_MISC_GDI_COMPATIBLE flag for a surface or by using the DXGI_SWAP_CHAIN_FLAG_GDI_COMPATIBLE flag for swap chains, otherwise this method fails.
• You must release the device and call the IDXGISurface1::ReleaseDC method before you issue any new Direct3D commands.
• This method fails if an outstanding DC has already been created by this method.
• The format for the surface or swap chain must be DXGI_FORMAT_B8G8R8A8_UNORM_SRGB or DXGI_FORMAT_B8G8R8A8_UNORM.
• On GetDC, the render target in the output merger of the Direct3D pipeline is unbound from the surface. You must call the ID3D11DeviceContext::OMSetRenderTargets method on the device prior to Direct3D rendering after GDI rendering.
• Prior to resizing buffers you must release all outstanding DCs.
If you're going to use it on the back buffer, remember to re-bind the render target after you've called ReleaseDC. It is not necessary to manually unbind the RT before calling GetDC, as that method does it for you.
You cannot use any Direct3D drawing between the GetDC() and ReleaseDC() calls, as the surface is exclusively locked out by DXGI for GDI. However, you can mix GDI and D3D rendering, provided that you call GetDC()/ReleaseDC() every time you need to use GDI before moving on to D3D.
This last bit may sound easy, but you'd be surprised how many developers run into this issue: when you draw with GDI on the back buffer, remember that this is the back buffer, not the framebuffer, so in order to actually see what you've drawn you have to re-bind the RT to the OM and call the swapChain->Present() method, so the back buffer becomes the framebuffer and its contents are displayed on the screen.
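Putting those rules together, a minimal sketch of one frame that mixes GDI text and D3D rendering on a GDI-compatible back buffer; the m_p... names are placeholders rather than anyone's actual code:
//1. GDI pass: GetDC implicitly unbinds the render target from the output merger.
HDC hdc = NULL;
if (SUCCEEDED(m_pDXGISurface->GetDC(FALSE, &hdc)))
{
    SetTextColor(hdc, RGB(255, 255, 255));
    SetBkMode(hdc, TRANSPARENT); //draw only the glyphs, no background box
    TextOutA(hdc, 10, 10, "LALALA!", 7);
    m_pDXGISurface->ReleaseDC(NULL); //release before issuing any new D3D commands
}
//2. Re-bind the render target, since GetDC unbound it.
m_pDeviceContext->OMSetRenderTargets(1, &m_pRenderTargetView, NULL);
//... any further D3D drawing ...
//3. Present so the back buffer with the GDI text actually reaches the screen.
m_pSwapChain->Present(1, 0);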
Maybe you're doing everything fine, and it's just that the text drawing doesn't do what you expect?
COLORREF bla = SetPixel(hDc,1,1,RGB(255,255,255));
bool hmm = TextOutA(hDc, 10, 10, "LALALA!", 7);
I don't see how you expect TextOutA to guess that bla should be used as the text color. AFAIK the default text color in a newly created/obtained DC is black. I'm not sure about the background fill mode, but if it's TRANSPARENT by default, that would fully explain why nothing is drawn.
I'd change your code to the following:
COLORREF bla = SetPixel(hDc, 1, 1, RGB(255, 255, 255));
VERIFY(SetTextColor(hDc, bla) != CLR_INVALID);
CRect rc(0, 0, 30, 20); // put relevant coordinates
VERIFY(ExtTextOutA(hDc, rc.left, rc.top, ETO_CLIPPED, &rc, "LALALA!", 7, NULL));
I am going to use it in the back buffer. I am not sure if it's done correctly. I can't see the drawing; it's showing black.
HDC GetSurfaceDC()
{
    m_pSurface1 = nullptr;
    HDC hdc{};
    //Set up the swap chain surface
    IF_FAILED_THROW_HR(m_swapChain->GetBuffer(0, IID_PPV_ARGS(&m_pSurface1)));
    // Obtain the back buffer for this window, which will be the final 3D render target.
    ID3D11Texture2DPtr backBuffer;
    IF_FAILED_THROW_HR(m_swapChain->GetBuffer(0, IID_PPV_ARGS(&backBuffer)));
    // Create a descriptor for the RenderTargetView.
    CD3D11_RENDER_TARGET_VIEW_DESC renderTargetViewDesc(D3D11_RTV_DIMENSION_TEXTURE2DARRAY, DXGI_FORMAT_B8G8R8A8_UNORM, 0, 0, 1);
    ID3D11RenderTargetViewPtr renderTargetView;
    // Create a view interface on the render target to use on bind for mono or left eye view.
    IF_FAILED_THROW_HR(m_device->CreateRenderTargetView(backBuffer, &renderTargetViewDesc, &renderTargetView));
    m_context->OMSetRenderTargets(1, &renderTargetView.GetInterfacePtr(), nullptr);
    IF_FAILED_THROW_HR(m_pSurface1->GetDC(FALSE, &hdc));
    return hdc;
}
void ReleaseSurfaceDC()
{
    if (m_pSurface1 == nullptr)
        return;
    //When finished drawing, release the DC
    m_pSurface1->ReleaseDC(nullptr);
    m_context->OMSetRenderTargets(1, &m_renderTargetView.GetInterfacePtr(), m_depthStencilView);
}
I have used this swap chain desc:
DXGI_SWAP_CHAIN_DESC swapChainDesc = { 0 };
swapChainDesc.BufferDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
swapChainDesc.Flags = DXGI_SWAP_CHAIN_FLAG_GDI_COMPATIBLE;