Access violation in DirectX OMSetRenderTargets - c++

I receive the following error (Unhandled exception at 0x527DAE81 (d3d11_1sdklayers.dll) in Lesson2.Triangles.exe: 0xC0000005: Access violation reading location 0x00000000) when running the Triangle sample application for DirectX 11 at feature level D3D_FEATURE_LEVEL_9_1. The exception is thrown at the OMSetRenderTargets call shown below, and does not occur if I remove that call from the program (but then the screen is blue and the triangle is not rendered).
//// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
//// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
//// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
//// PARTICULAR PURPOSE.
////
//// Copyright (c) Microsoft Corporation. All rights reserved
#include <wrl.h>
#include <d3d11_1.h>
#include "DirectXSample.h"
#include "BasicMath.h"
#include "BasicReaderWriter.h"
using namespace Microsoft::WRL;
using namespace Windows::UI::Core;
using namespace Windows::Foundation;
using namespace Windows::ApplicationModel::Core;
using namespace Windows::ApplicationModel::Infrastructure;
// This class defines the application as a whole.
ref class Direct3DTutorialViewProvider : public IViewProvider
{
private:
CoreWindow^ m_window;
ComPtr<IDXGISwapChain1> m_swapChain;
ComPtr<ID3D11Device1> m_d3dDevice;
ComPtr<ID3D11DeviceContext1> m_d3dDeviceContext;
ComPtr<ID3D11RenderTargetView> m_renderTargetView;
public:
// This method is called on application launch.
void Initialize(
_In_ CoreWindow^ window,
_In_ CoreApplicationView^ applicationView
)
{
m_window = window;
}
// This method is called after Initialize.
void Load(_In_ Platform::String^ entryPoint)
{
}
// This method is called after Load.
void Run()
{
// First, create the Direct3D device.
// This flag is required in order to enable compatibility with Direct2D.
UINT creationFlags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
#if defined(_DEBUG)
// If the project is in a debug build, enable debugging via SDK Layers with this flag.
creationFlags |= D3D11_CREATE_DEVICE_DEBUG;
#endif
// This array defines the ordering of feature levels that D3D should attempt to create.
D3D_FEATURE_LEVEL featureLevels[] =
{
D3D_FEATURE_LEVEL_11_1,
D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1,
D3D_FEATURE_LEVEL_10_0,
D3D_FEATURE_LEVEL_9_3,
D3D_FEATURE_LEVEL_9_1
};
ComPtr<ID3D11Device> d3dDevice;
ComPtr<ID3D11DeviceContext> d3dDeviceContext;
DX::ThrowIfFailed(
D3D11CreateDevice(
nullptr, // specify nullptr to use the default adapter
D3D_DRIVER_TYPE_HARDWARE,
nullptr, // leave as nullptr if hardware is used
creationFlags, // optionally set debug and Direct2D compatibility flags
featureLevels,
ARRAYSIZE(featureLevels),
D3D11_SDK_VERSION, // always set this to D3D11_SDK_VERSION
&d3dDevice,
nullptr,
&d3dDeviceContext
)
);
// Retrieve the Direct3D 11.1 interfaces.
DX::ThrowIfFailed(
d3dDevice.As(&m_d3dDevice)
);
DX::ThrowIfFailed(
d3dDeviceContext.As(&m_d3dDeviceContext)
);
// After the D3D device is created, create additional application resources.
CreateWindowSizeDependentResources();
// Create a Basic Reader-Writer class to load data from disk. This class is examined
// in the Resource Loading sample.
BasicReaderWriter^ reader = ref new BasicReaderWriter();
// Load the raw vertex shader bytecode from disk and create a vertex shader with it.
auto vertexShaderBytecode = reader->ReadData("SimpleVertexShader.cso");
ComPtr<ID3D11VertexShader> vertexShader;
DX::ThrowIfFailed(
m_d3dDevice->CreateVertexShader(
vertexShaderBytecode->Data,
vertexShaderBytecode->Length,
nullptr,
&vertexShader
)
);
// Create an input layout that matches the layout defined in the vertex shader code.
// For this lesson, this is simply a float2 vector defining the vertex position.
const D3D11_INPUT_ELEMENT_DESC basicVertexLayoutDesc[] =
{
{ "POSITION", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
};
ComPtr<ID3D11InputLayout> inputLayout;
DX::ThrowIfFailed(
m_d3dDevice->CreateInputLayout(
basicVertexLayoutDesc,
ARRAYSIZE(basicVertexLayoutDesc),
vertexShaderBytecode->Data,
vertexShaderBytecode->Length,
&inputLayout
)
);
// Load the raw pixel shader bytecode from disk and create a pixel shader with it.
auto pixelShaderBytecode = reader->ReadData("SimplePixelShader.cso");
ComPtr<ID3D11PixelShader> pixelShader;
DX::ThrowIfFailed(
m_d3dDevice->CreatePixelShader(
pixelShaderBytecode->Data,
pixelShaderBytecode->Length,
nullptr,
&pixelShader
)
);
// Create vertex and index buffers that define a simple triangle.
float3 triangleVertices[] =
{
float3(-0.5f, -0.5f,13.5f),
float3( 0.0f, 0.5f,0),
float3( 0.5f, -0.5f,0),
};
D3D11_BUFFER_DESC vertexBufferDesc = {0};
vertexBufferDesc.ByteWidth = sizeof(float3) * ARRAYSIZE(triangleVertices);
vertexBufferDesc.Usage = D3D11_USAGE_DEFAULT;
vertexBufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
vertexBufferDesc.CPUAccessFlags = 0;
vertexBufferDesc.MiscFlags = 0;
vertexBufferDesc.StructureByteStride = 0;
D3D11_SUBRESOURCE_DATA vertexBufferData;
vertexBufferData.pSysMem = triangleVertices;
vertexBufferData.SysMemPitch = 0;
vertexBufferData.SysMemSlicePitch = 0;
ComPtr<ID3D11Buffer> vertexBuffer;
DX::ThrowIfFailed(
m_d3dDevice->CreateBuffer(
&vertexBufferDesc,
&vertexBufferData,
&vertexBuffer
)
);
// Once all D3D resources are created, configure the application window.
// Allow the application to respond when the window size changes.
m_window->SizeChanged +=
ref new TypedEventHandler<CoreWindow^, WindowSizeChangedEventArgs^>(
this,
&Direct3DTutorialViewProvider::OnWindowSizeChanged
);
// Specify the cursor type as the standard arrow cursor.
m_window->PointerCursor = ref new CoreCursor(CoreCursorType::Arrow, 0);
// Activate the application window, making it visible and enabling it to receive events.
m_window->Activate();
// Enter the render loop. Note that tailored applications should never exit.
while (true)
{
// Process events incoming to the window.
m_window->Dispatcher->ProcessEvents(CoreProcessEventsOption::ProcessAllIfPresent);
// Specify the render target we created as the output target.
ID3D11RenderTargetView* targets[1] = {m_renderTargetView.Get()};
m_d3dDeviceContext->OMSetRenderTargets(
1,
targets,
NULL // use no depth stencil
);
// Clear the render target to a solid color.
const float clearColor[4] = { 0.071f, 0.04f, 0.561f, 1.0f };
//Code fails here
m_d3dDeviceContext->ClearRenderTargetView(
m_renderTargetView.Get(),
clearColor
);
m_d3dDeviceContext->IASetInputLayout(inputLayout.Get());
// Set the vertex and index buffers, and specify the way they define geometry.
UINT stride = sizeof(float3);
UINT offset = 0;
m_d3dDeviceContext->IASetVertexBuffers(
0,
1,
vertexBuffer.GetAddressOf(),
&stride,
&offset
);
m_d3dDeviceContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
// Set the vertex and pixel shader stage state.
m_d3dDeviceContext->VSSetShader(
vertexShader.Get(),
nullptr,
0
);
m_d3dDeviceContext->PSSetShader(
pixelShader.Get(),
nullptr,
0
);
// Draw the triangle.
m_d3dDeviceContext->Draw(3,0);
// Present the rendered image to the window. Because the maximum frame latency is set to 1,
// the render loop will generally be throttled to the screen refresh rate, typically around
// 60Hz, by sleeping the application on Present until the screen is refreshed.
DX::ThrowIfFailed(
m_swapChain->Present(1, 0)
);
}
}
// This method is called before the application exits.
void Uninitialize()
{
}
private:
// This method is called whenever the application window size changes.
void OnWindowSizeChanged(
_In_ CoreWindow^ sender,
_In_ WindowSizeChangedEventArgs^ args
)
{
m_renderTargetView = nullptr;
CreateWindowSizeDependentResources();
}
// This method creates all application resources that depend on
// the application window size. It is called at app initialization,
// and whenever the application window size changes.
void CreateWindowSizeDependentResources()
{
if (m_swapChain != nullptr)
{
// If the swap chain already exists, resize it.
DX::ThrowIfFailed(
m_swapChain->ResizeBuffers(
2,
0,
0,
DXGI_FORMAT_R8G8B8A8_UNORM,
0
)
);
}
else
{
// If the swap chain does not exist, create it.
DXGI_SWAP_CHAIN_DESC1 swapChainDesc = {0};
swapChainDesc.Stereo = false;
swapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
swapChainDesc.Scaling = DXGI_SCALING_NONE;
swapChainDesc.Flags = 0;
// Use automatic sizing.
swapChainDesc.Width = 0;
swapChainDesc.Height = 0;
// This is the most common swap chain format.
swapChainDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
// Don't use multi-sampling.
swapChainDesc.SampleDesc.Count = 1;
swapChainDesc.SampleDesc.Quality = 0;
// Use two buffers to enable flip effect.
swapChainDesc.BufferCount = 2;
// We recommend using this swap effect for all applications.
swapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL;
// Once the swap chain description is configured, it must be
// created on the same adapter as the existing D3D Device.
// First, retrieve the underlying DXGI Device from the D3D Device.
ComPtr<IDXGIDevice1> dxgiDevice;
DX::ThrowIfFailed(
m_d3dDevice.As(&dxgiDevice)
);
// Ensure that DXGI does not queue more than one frame at a time. This both reduces
// latency and ensures that the application will only render after each VSync, minimizing
// power consumption.
DX::ThrowIfFailed(
dxgiDevice->SetMaximumFrameLatency(1)
);
// Next, get the parent factory from the DXGI Device.
ComPtr<IDXGIAdapter> dxgiAdapter;
DX::ThrowIfFailed(
dxgiDevice->GetAdapter(&dxgiAdapter)
);
ComPtr<IDXGIFactory2> dxgiFactory;
DX::ThrowIfFailed(
dxgiAdapter->GetParent(
__uuidof(IDXGIFactory2),
&dxgiFactory
)
);
// Finally, create the swap chain.
DX::ThrowIfFailed(
dxgiFactory->CreateSwapChainForImmersiveWindow(
m_d3dDevice.Get(),
DX::GetIUnknown(m_window),
&swapChainDesc,
nullptr, // allow on all displays
&m_swapChain
)
);
}
// Once the swap chain is created, create a render target view. This will
// allow Direct3D to render graphics to the window.
ComPtr<ID3D11Texture2D> backBuffer;
DX::ThrowIfFailed(
m_swapChain->GetBuffer(
0,
__uuidof(ID3D11Texture2D),
&backBuffer
)
);
DX::ThrowIfFailed(
m_d3dDevice->CreateRenderTargetView(
backBuffer.Get(),
nullptr,
&m_renderTargetView
)
);
// After the render target view is created, specify that the viewport,
// which describes what portion of the window to draw to, should cover
// the entire window.
D3D11_TEXTURE2D_DESC backBufferDesc = {0};
backBuffer->GetDesc(&backBufferDesc);
D3D11_VIEWPORT viewport;
viewport.TopLeftX = 0.0f;
viewport.TopLeftY = 0.0f;
viewport.Width = static_cast<float>(backBufferDesc.Width);
viewport.Height = static_cast<float>(backBufferDesc.Height);
viewport.MinDepth = D3D11_MIN_DEPTH;
viewport.MaxDepth = D3D11_MAX_DEPTH;
m_d3dDeviceContext->RSSetViewports(1, &viewport);
}
};
// This class defines how to create the custom View Provider defined above.
ref class Direct3DTutorialViewProviderFactory : IViewProviderFactory
{
public:
IViewProvider^ CreateViewProvider()
{
return ref new Direct3DTutorialViewProvider();
}
};
[Platform::MTAThread]
int main(array<Platform::String^>^)
{
auto viewProviderFactory = ref new Direct3DTutorialViewProviderFactory();
Windows::ApplicationModel::Core::CoreApplication::Run(viewProviderFactory);
return 0;
}

I have marked this as the answer for the time being. Feel free to post a different answer; I will investigate it and choose that answer instead. Sometimes the best answer is "Microsoft Magic": Microsoft seems to be doing something internally that it isn't exposing to third-party developers. Not much can be said at this stage in development, so for now it is best to simply use the WARP rasterizer on older devices.
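For reference, here is a minimal sketch of that WARP fallback, reusing the creationFlags, featureLevels, d3dDevice, and d3dDeviceContext names from the Run() method in the question. It only shows the mechanics; whether WARP actually sidesteps the 9.x crash on a given machine needs testing.
// Sketch: try a hardware device first; fall back to the WARP software rasterizer if that fails.
HRESULT hr = D3D11CreateDevice(
nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, creationFlags,
featureLevels, ARRAYSIZE(featureLevels), D3D11_SDK_VERSION,
&d3dDevice, nullptr, &d3dDeviceContext);
if (FAILED(hr))
{
hr = D3D11CreateDevice(
nullptr, D3D_DRIVER_TYPE_WARP, nullptr, creationFlags,
featureLevels, ARRAYSIZE(featureLevels), D3D11_SDK_VERSION,
&d3dDevice, nullptr, &d3dDeviceContext);
}
DX::ThrowIfFailed(hr);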

It looks to me as if you're trying to use a resource that has already been released. Perhaps you should debug-output the result of Release(), as it returns the number of remaining references.
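For example, a minimal sketch of that diagnostic using the names from the question (the AddRef/Release pair is just a way to peek at the live reference count):
ID3D11RenderTargetView* rtv = m_renderTargetView.Get();
if (rtv == nullptr)
{
// A null view here would explain the read at location 0x00000000.
OutputDebugString(L"m_renderTargetView is null - released or never created?\n");
}
else
{
rtv->AddRef();
ULONG refs = rtv->Release(); // remaining reference count
wchar_t msg[64];
swprintf_s(msg, L"RTV reference count: %lu\n", refs);
OutputDebugString(msg);
}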

Related

Visual Studio C++ How do I stop images from being scaled wrong when I maximize window D3D11 and D2D1

So I'm trying to iron out kinks in how I'm rendering images with Direct stuff. Right now it works as a DXGI swap chain with D3D11, and I make an ID2D1RenderTarget which I draw to using bitmaps.
My issue is that when I hit the maximize button on the window, my images are off, or at least the ones using data from the window grabbed with GetClientRect (the others seem close enough, but are probably still off, and I want to be able to use the client space to scale and draw things later as well). I have a D2D1::RectF set with the top left at 0.0f and the bottom right at the window's width and height grabbed via GetClientRect (along with a few others just for additional fooling around).
Looking around, it seems I need to recreate the ID2D1RenderTarget and/or resize the buffers. Calling a function to recreate the ID2D1RenderTarget before making the objects which contain the bitmaps and the functions which draw them did not help with the issue at all. I tried resizing the buffers, but I keep getting errors. The first set were about parameters, but before fixing that I realized I needed to release the objects first; now, since I made the objects with ComPtr, the way they get deleted seems to be having issues: client.h raises an exception, "Access violation executing location ", in the unsigned long InternalRelease() function. This occurs in the function that adjusts the buffers and target. So right now I'm lost as to what to do to get the desired effect.
The only other thing to note is that the ID3D11RenderTargetView I made is only used to clear to a color, since I had errors calling Clear(D2D1::ColorF(D2D1::ColorF::White)); on my ID2D1RenderTarget. I don't care if the solution resizes the ID3D11RenderTargetView, unless keeping it would improve speed or prevent some unforeseen issue elsewhere, since I don't intend to use it for anything else. If I could call Clear on the ID2D1RenderTarget, drop the ID3D11RenderTargetView, and keep the swap chain while resolving the issue, that would work too. I also intend to work out fullscreen mode next, so a method that works with that would be very much desired. I'm open to any other advice here; the code isn't polished and is just in a form to get things working first, so I'll probably miss things even when tidying up. Anyway, here's the code:
Here's the Graphics class, where I make the swap chain, buffers, render targets and such, and the function in which I try to reset the buffers. Side note: I followed some tutorials on my way here to understand enough of the Direct stuff to get to the point where I'm looking into the Microsoft docs and understanding them somewhat to solve the problems I have. One of those tutorials went through making some exceptions, and that is what things like Graphic_Throw_Failure() are for (though I did it wrong, or the errors are doing something, and I can't see the pop-up window half the time; sometimes it stops in the exception code but I can still read the message).
//not in the cpp file but just how some variables exist for clarity.
private:
Microsoft::WRL::ComPtr<ID3D11Device> pDevice = nullptr;
Microsoft::WRL::ComPtr<IDXGISwapChain> pSwapChain = nullptr;
Microsoft::WRL::ComPtr<ID3D11DeviceContext> pContext = nullptr;
Microsoft::WRL::ComPtr<ID3D11RenderTargetView> pRTarget = nullptr;
ID2D1RenderTarget* p2dRenderTarget = nullptr;
ID2D1Factory* p2DFactory = nullptr;
Graphics::Graphics(HWND hwnd) {
DXGI_SWAP_CHAIN_DESC swapchainDesc = {};
ZeroMemory(&swapchainDesc, sizeof(DXGI_SWAP_CHAIN_DESC));
swapchainDesc.Windowed = true;
swapchainDesc.BufferCount = 1;
swapchainDesc.BufferDesc.Height = 0;
swapchainDesc.BufferDesc.Width = 0;
swapchainDesc.BufferDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
swapchainDesc.SampleDesc.Count = 1;
swapchainDesc.SampleDesc.Quality = 0;
swapchainDesc.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
swapchainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
swapchainDesc.BufferDesc.RefreshRate.Numerator = 1;
swapchainDesc.BufferDesc.RefreshRate.Denominator = 60;
swapchainDesc.OutputWindow = hwnd;
HRESULT hre;
Graphic_Throw_Failure(D3D11CreateDeviceAndSwapChain(
nullptr,
D3D_DRIVER_TYPE_HARDWARE,
nullptr,
D3D11_CREATE_DEVICE_BGRA_SUPPORT,
levels,
4,
D3D11_SDK_VERSION,
&swapchainDesc,
&pSwapChain,
&pDevice,
nullptr,
&pContext
));
//3Dbuffer setup
wrl::ComPtr<ID3D11Resource> p3dbuffer = nullptr;
Graphic_Throw_Failure(pSwapChain->GetBuffer(0, __uuidof(ID3D11Resource), &p3dbuffer));
Graphic_Throw_Failure(pDevice->CreateRenderTargetView(p3dbuffer.Get(), nullptr, &pRTarget));
//2D buffer Setup
IDXGISurface* pBackBuffer = nullptr;
Graphic_Throw_Failure(pSwapChain->GetBuffer(0,IID_PPV_ARGS(&pBackBuffer)));
//makes 2d Factory
Graphic_Throw_Failure(D2D1CreateFactory(D2D1_FACTORY_TYPE_SINGLE_THREADED, &p2DFactory));
//sets up DXGI buffer for 2d
FLOAT dpi;
dpi = GetDpiForWindow(hwnd);
//p2DFactory->GetDesktopDpi(&dpiX, &dpiY);
D2D1_RENDER_TARGET_PROPERTIES p2dRTprops =
D2D1::RenderTargetProperties(
D2D1_RENDER_TARGET_TYPE_DEFAULT,
D2D1::PixelFormat(DXGI_FORMAT_UNKNOWN, D2D1_ALPHA_MODE_PREMULTIPLIED),
dpi,
dpi
);
Graphic_Throw_Failure(p2DFactory->CreateDxgiSurfaceRenderTarget(
pBackBuffer, &p2dRTprops, &p2dRenderTarget
));
if(pBackBuffer!=nullptr)
pBackBuffer->Release();
}
//the adjusting function I failed to make. could also be missing some things I need to clear
//before calling ResizeBuffers
void Graphics::adjustRenderTargets(HWND hwnd) {
HRESULT hre;
pContext->ClearState();
p2dRenderTarget->Release();
pRTarget->Release();
//3Dbuffer setup
wrl::ComPtr<ID3D11Resource> p3dbuffer = nullptr;
Graphic_Throw_Failure(pSwapChain->GetBuffer(0, __uuidof(ID3D11Resource), &p3dbuffer));
Graphic_Throw_Failure(pDevice->CreateRenderTargetView(p3dbuffer.Get(), nullptr, &pRTarget));
//2D buffer Setup
IDXGISurface* pBackBuffer = nullptr;
Graphic_Throw_Failure(pSwapChain->GetBuffer(0, IID_PPV_ARGS(&pBackBuffer)));
//makes 2d Factory
Graphic_Throw_Failure(D2D1CreateFactory(D2D1_FACTORY_TYPE_SINGLE_THREADED, &p2DFactory));
//sets up DXGI buffer for 2d
FLOAT dpi;
dpi = GetDpiForWindow(hwnd);
//p2DFactory->GetDesktopDpi(&dpiX, &dpiY);
D2D1_RENDER_TARGET_PROPERTIES p2dRTprops =
D2D1::RenderTargetProperties(
D2D1_RENDER_TARGET_TYPE_DEFAULT,
D2D1::PixelFormat(DXGI_FORMAT_UNKNOWN, D2D1_ALPHA_MODE_PREMULTIPLIED),
dpi,
dpi
);
Graphic_Throw_Failure(p2DFactory->CreateDxgiSurfaceRenderTarget(
pBackBuffer, &p2dRTprops, &p2dRenderTarget
));
if (pBackBuffer != nullptr)
pBackBuffer->Release();
};
//and the destructor in case there is something still wrong there though
//currently not giving me issues since I set the objects in it to nullptr after releasing it.
//didn't work for the ComPtr.
Graphics::~Graphics() {
if (p2DFactory != nullptr) {
p2DFactory->Release();
}
if (p2dRenderTarget != nullptr) {
p2dRenderTarget->Release();
}
}
This is the class which holds the bitmaps, deals with them, and draws them. Once again, I made some exceptions for this class:
//some variables in the header file
ID2D1Bitmap* Bittmap=nullptr;
Graphics* GFX;
Sprites::Sprites(const wchar_t* filename, Graphics* gfx) {
HRESULT hre;
GFX = gfx;
//makes WIC Factory
IWICImagingFactory* WICfactory = NULL;
Sprite_Throw_Failure(CoCreateInstance(
CLSID_WICImagingFactory,
NULL,
CLSCTX_INPROC_SERVER,
IID_IWICImagingFactory,
(LPVOID*)&WICfactory
));
//Makes the Decoder
IWICBitmapDecoder* WICdecode = NULL;
Sprite_Throw_Failure(WICfactory->CreateDecoderFromFilename(
filename,
NULL,
GENERIC_READ,
WICDecodeMetadataCacheOnLoad,
&WICdecode
));
//Read the frame (should be only one so read the image)
IWICBitmapFrameDecode* WICframe = NULL;
Sprite_Throw_Failure(WICdecode->GetFrame(0, &WICframe));
//Format converter
IWICFormatConverter* WICconverter = NULL;
Sprite_Throw_Failure(WICfactory->CreateFormatConverter(&WICconverter));
//makes the converter set up to create a 32bpp BGRA bitmap
Sprite_Throw_Failure(WICconverter->Initialize(
WICframe,
GUID_WICPixelFormat32bppPBGRA,
WICBitmapDitherTypeNone,
NULL,
0.0,
WICBitmapPaletteTypeCustom
));
//makes the bitmap
Sprite_Throw_Failure(GFX->Get2DRenderTarget()->CreateBitmapFromWicBitmap(
WICconverter,
NULL,
&Bittmap
));
if (WICfactory) WICfactory->Release();
if (WICdecode) WICdecode->Release();
if (WICconverter)WICconverter->Release();
if (WICframe)WICframe->Release();
}
//draws the sprites
void Sprites::Draw(D2D1_RECT_F location) {
HRESULT hre;
GFX->Get2DRenderTarget()->BeginDraw();
GFX->Get2DRenderTarget()->DrawBitmap(
Bittmap,
location, //destination rect
1.0f, //opacity
D2D1_BITMAP_INTERPOLATION_MODE::D2D1_BITMAP_INTERPOLATION_MODE_NEAREST_NEIGHBOR,
D2D1::RectF(
1980.0f, 2850.0f, 3000.0f,
3600.0f) //source rect
);
Sprite_Throw_Failure(GFX->Get2DRenderTarget()-> EndDraw());
}
Sprites::~Sprites() {
//bitmapsheet.clear();
if (Bittmap != nullptr) {
Bittmap->Release();
}
}
This is the class where the main loop is handled. wnd is the window class I made, which creates and manages the window. I use it here to get the Graphics object the window uses, which has all the Direct stuff. Here are some variables that appear; forgive the name bob.
//in header file
private:
Window wnd;
//in cpp file
Sprites* testsprite;
Sprites* testsprite2;
D2D1_RECT_F bob;
Within the function that is called over and over for the duration of the program, at the part where I render:
//inefficient constant adjusting of rendering just so I can quickly assess
//the change and make sure it works so when I implement how I intend the windows to be scaled it
//will already be done
wnd.GFX().adjustRenderTargets(wnd.getwindowhandle());
//clearing the ID3D11RenderTargetView to a color
wnd.GFX().ClearBuffer(0.3f, 0.5f, 1.0f);
//drawing calls
const wchar_t* filename = L"\environmentsketches 02.png";
//this creates a Sprites object; getGraphix returns a pointer to the graphics object,
//only really used to get the ID2D1RenderTarget, but I may use it for other things; will
//remove later if not needed and just pass the render target unless issues arise.
testsprite = new Sprites(filename, wnd.GFX().getGraphix());
bob = D2D1::RectF(
0.0f, 0.0f, wnd.getwindowWidth(),
wnd.getwindowHeight());
//This draws the bitmap of a predetermined portion of the image but uses bob
//to determine where to draw the bitmap
testsprite->Draw(bob);
bob = D2D1::RectF(
0.0f, 0.0f, wnd.getwindowWidth()/(16.0f/9.0f),
wnd.getwindowHeight());
testsprite->Draw(bob);
filename= L"\envrioment sketch march 1.png";
bob = D2D1::RectF(
100.0f, 100.0f, 600.f,
300.f);
testsprite2 = new Sprites(filename, wnd.GFX().getGraphix());
testsprite2->Draw(bob);
//EndFrame just calls present on the IDXGISwapChain
wnd.GFX().EndFrame();
testsprite->~Sprites();
testsprite2->~Sprites();
If you read through all this, thank you, and thank you for any advice you have to offer.
Don't code late or tired. The first thing I realized is that when posting the code here I forgot to include the call to SwapChain->ResizeBuffers(0, width, height, DXGI_FORMAT_UNKNOWN, 0), which is where the mistake was. I changed my pointers from smart to regular to manage their release manually for this, but the issue was more that the last parameter wasn't 0: it was D3D11_CREATE_DEVICE_BGRA_SUPPORT instead of a proper swap-chain flag, and it was being read as another flag (I believe DXGI_SWAP_CHAIN_FLAG_DISPLAY_ONLY) which I couldn't use because of how I made my swap chain, resulting in an error and the program just not running.
In all, the solution to my problem was to release my render targets (I had already released the buffers after I made the targets), resize the buffers, and then remake the ID2D1RenderTarget. Just don't put a wrong flag in (or make sure the call is included when posting code for feedback, so the mistake might be caught by others).
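For completeness, a minimal sketch of the fixed sequence described above, using the member names from the Graphics class earlier in the question (the recreation calls at the end are the same ones shown in the constructor):
void Graphics::adjustRenderTargets(HWND hwnd) {
HRESULT hre;
pContext->ClearState();
// Drop every reference to the old buffers before resizing.
p2dRenderTarget->Release();
p2dRenderTarget = nullptr;
pRTarget = nullptr; // ComPtr releases the old render target view
RECT rc;
GetClientRect(hwnd, &rc);
Graphic_Throw_Failure(pSwapChain->ResizeBuffers(
0, // 0 keeps the existing buffer count
rc.right - rc.left,
rc.bottom - rc.top,
DXGI_FORMAT_UNKNOWN, // keeps the existing format
0)); // a real swap-chain flag (0), not D3D11_CREATE_DEVICE_BGRA_SUPPORT
// ...then recreate pRTarget and p2dRenderTarget exactly as in the constructor.
}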

Creating Swap Chain causes Window Handle to become Invalid

I'm trying to work with both Direct2D and Direct3D. Right now, I'm having Direct2D draw content to a separate Device Context (with an HDC) and then copy the contents of that Device Context into my window. I could show the code I use to set that up in an edit to this post if requested, but before Direct3D gets involved, that part works.
Here is a simplified version of the Window Drawing code I use.
if (d3dEngine.Get()) // Object used to hold Direct3D Resources (.Get() returns a pointer for a null check)
{
// d3dEngine->PrepareScene(D2D1::ColorF(D2D1::ColorF::Wheat));
}
// Drawing Board holds the Direct 2D Render Target
drawingBoard->GetRenderer()->BeginDraw();
drawingBoard->GetRenderer()->Clear(D2D1::ColorF(1.0f,1.0f,1.0f,1.0f));
mainPage->Draw(); // Main Page Holds various objects that draw to Direct2D
if (d3dEngine.Get())
d3dEngine->FinalizeScene();
drawingBoard->GetRenderer()->EndDraw();
// Get the Secondary Device Context that Direct2D draws to
HDC dc = drawingBoard->GetDc();
RECT r{ 0,0,0,0 };
int err = 0;
// Retrieve the Rectangle for the window (currentWindow is the window handle used)
if(!GetClientRect(currentWindow, &r))
err = GetLastError();
// Use the BitBlt function to copy Direct2D content into a window
if (!BitBlt(GetDC(currentWindow), r.left, r.top, r.right - r.left, r.bottom - r.top, dc, 0, 0, SRCCOPY))
err = GetLastError();
Before any Direct3D resources are created (and the d3dEngine.Get() call returns null), this code runs to my satisfaction.
However, following the creation of Direct3D resources, the code fails:
RECT r{ 0,0,0,0 };
int err = 0;
// Retrieve the Rectangle for the window (currentWindow is the window handle used)
if(!GetClientRect(currentWindow, &r))
err = GetLastError();
The window handle currentWindow becomes invalid as GetLastError() returns 1400 after the call to GetClientRect. I suspect that the Swap Chain in Direct3D 11 may play a role due to the following code used to activate Direct3D.
GetClientRect(window, &Location);
unsigned int width = Location.right - Location.left,
height = Location.bottom - Location.top;
D3D_DRIVER_TYPE dTypes[] =
{
D3D_DRIVER_TYPE_HARDWARE//, D3D_DRIVER_TYPE_WARP
};
int tTypes = ARRAYSIZE(dTypes);
D3D_FEATURE_LEVEL dLevels[] =
{
D3D_FEATURE_LEVEL_11_1, D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1, D3D_FEATURE_LEVEL_10_0
};
int tLevels = ARRAYSIZE(dLevels);
DXGI_SWAP_CHAIN_DESC swapChainDescription;
// Initialize the swap chain
swapChainDescription.BufferCount = 2;
swapChainDescription.BufferDesc.Width = Location.right - Location.left;
swapChainDescription.BufferDesc.Height = Location.bottom - Location.top;
swapChainDescription.BufferDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
swapChainDescription.BufferDesc.RefreshRate.Numerator = 30;
swapChainDescription.BufferDesc.RefreshRate.Denominator = 1;
swapChainDescription.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
swapChainDescription.OutputWindow = window;
swapChainDescription.Windowed = true;
swapChainDescription.SampleDesc.Count = 1;
swapChainDescription.SampleDesc.Quality = 0;
swapChainDescription.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;
swapChainDescription.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
swapChainDescription.Flags = DXGI_SWAP_CHAIN_FLAG_GDI_COMPATIBLE;
swapChainDescription.SwapEffect = DXGI_SWAP_EFFECT_FLIP_DISCARD;// DXGI_SWAP_EFFECT_DISCARD;
unsigned int flags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
/// Other initialization Code
HRESULT results = 0;
// Initialize ID3D11Device "graphicsDevice"
IDXGISwapChain* sc = nullptr;
results = dxFact->CreateSwapChain(graphicsDevice, &swapChainDescription, &sc);
This code is in a different class than the Window drawing code towards the beginning.
The window variable in the Direct3D code holds the same value that currentWindow holds in the Window Code.
Is there anyone who could provide insight as to what is going on and why the window handle stops working? And perhaps suggest a workaround?
While I'm still unsure why the window handle seemed to be useless when I checked, I was able to develop a workaround.
Basically, I have my Window class hold a RECT indicating the size of the Window and I use that instead of getting the RECT from the Window every time.
RECT r{ 0,0,0,0 };
int err = 0;
// Retrieve the Rectangle for the window (currentWindow is the window handle used)
if(!GetClientRect(currentWindow, &r))
err = GetLastError();
// Use the BitBlt function to copy Direct2D content into a window
if (!BitBlt(GetDC(currentWindow), r.left, r.top, r.right - r.left, r.bottom - r.top, dc, 0, 0, SRCCOPY))
err = GetLastError();
// Now, it uses a 'size' attribute
int err = 0;
if (!BitBlt(GetTWindowDc(), size.left, size.top, size.right - size.left, size.bottom - size.top, dc, 0, 0, SRCCOPY))
err = GetLastError();
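The post doesn't show how the cached size member stays current; a plausible sketch (the WM_SIZE handler and names here are my assumption, not the poster's code) is to refresh it in the window procedure on every resize:
#include <windows.h>
RECT size = { 0, 0, 0, 0 }; // cached client rectangle
LRESULT CALLBACK WndProc(HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam)
{
switch (msg)
{
case WM_SIZE:
// Keep the cached client rectangle in sync with the window.
size.left = 0;
size.top = 0;
size.right = LOWORD(lParam); // new client-area width
size.bottom = HIWORD(lParam); // new client-area height
return 0;
}
return DefWindowProc(hwnd, msg, wParam, lParam);
}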
When it comes to getting the Window Device Context, the new GetTWindowDc() method takes care of that:
if (d3dEngine.Get())
return d3dEngine->GetDC();
return GetDC(currentWindow);
Basically, if Direct3D is activated, the 3D manager object (which holds a DXGISurface) retrieves the HDC from Direct3D to use.
HDC TWindowEngine::GetDC()
{
if (!surface.Get()) // My Unique Smart pointer to a DXGISurface1 object
return 0;
HDC dc;
assert(SUCCEEDED(surface->GetDC(FALSE, &dc)));
return dc;
}
According to the IDXGISurface1::GetDC documentation, you need to call the corresponding ReleaseDC on the DXGISurface1 object. So this is how I do it.
In the Window Draw code:
if (!BitBlt(GetTWindowDc(), size.left, size.top, size.right - size.left, size.bottom - size.top, dc, 0, 0, SRCCOPY))
err = GetLastError();
FlushDc();
Here is the FlushDc method:
void TWindow::FlushDc()
{
if (d3dEngine.Get())
d3dEngine->ClearDC();
}
And the ClearDC method is implemented:
void TWindowEngine::ClearDC()
{
if (surface.Get())
surface->ReleaseDC(nullptr);
}

Convert IMFSample* to ID3D11ShaderResourceView*

I am new to DirectX and I am trying to make a simple application that reads a video and displays it on a quad.
I read the video using Windows Media Foundation (IMFSourceReader), that sends me a callback when a sample is decoded (IMFSample).
I want to convert this IMFSample* to a ID3D11ShaderResourceView* in order to use it as a texture to draw my quad, however the conversion fails.
Here is what I do (I removed the non-relevant error checks):
HRESULT SourceReaderCB::OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex, DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample)
{
...
DWORD NumBuffers = 0;
hr = pSample->GetBufferCount(&NumBuffers);
if (FAILED(hr) || NumBuffers < 1)
{
...
}
IMFMediaBuffer* SourceMediaPtr = nullptr;
hr = pSample->GetBufferByIndex(0, &SourceMediaPtr);
if (FAILED(hr))
{
...
}
ComPtr<IMFMediaBuffer> _pInputBuffer = SourceMediaPtr;
ComPtr<IMF2DBuffer2> _pInputBuffer2D2;
bool isVideoFrame = (_pInputBuffer.As(&_pInputBuffer2D2) == S_OK);
if (isVideoFrame)
{
IMFDXGIBuffer* pDXGIBuffer = NULL;
ID3D11Texture2D* pSurface = NULL;
hr = _pInputBuffer->QueryInterface(__uuidof(IMFDXGIBuffer), (LPVOID*)&pDXGIBuffer);
if (FAILED(hr))
{
SafeRelease(&SourceMediaPtr);
goto done;
}
hr = pDXGIBuffer->GetResource(__uuidof(ID3D11Texture2D), (LPVOID*)&pSurface);
if (FAILED(hr))
{
...
}
ID3D11ShaderResourceView* resourceView;
if (pSurface)
{
D3D11_TEXTURE2D_DESC textureDesc;
pSurface->GetDesc(&textureDesc);
D3D11_SHADER_RESOURCE_VIEW_DESC shaderResourceViewDesc;
shaderResourceViewDesc.Format = DXGI_FORMAT_R8_UNORM;
shaderResourceViewDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
shaderResourceViewDesc.Texture2D.MostDetailedMip = 0;
shaderResourceViewDesc.Texture2D.MipLevels = 1;
hr = d3d11device->CreateShaderResourceView(pSurface, &shaderResourceViewDesc, &resourceView);
if (FAILED(hr))
{
... // CODE FAILS HERE
}
...
}
}
}
My first issue is that I set shaderResourceViewDesc.Format to DXGI_FORMAT_R8_UNORM, which will probably just give me a red image (I will have to investigate this later).
The second and blocking issue I am facing is that the conversion of ID3D11Texture2D to ID3D11ShaderResourceView fails with the following error message:
ID3D11Device::CreateShaderResourceView: A ShaderResourceView cannot be created of a Resource that did not specify the D3D11_BIND_SHADER_RESOURCE BindFlag. [ STATE_CREATION ERROR #129: CREATESHADERRESOURCEVIEW_INVALIDRESOURCE]
I understand that there is a flag missing at the creation of the texture that prevents me from doing what I want to do, but as the data buffer is created by WMF, I am not sure what I am supposed to do to fix this issue.
Thanks for your help
I see your code, and I can say that your approach is wrong - no offense. Firstly, the video decoder creates a simple texture - in your situation a DirectX 11 texture - it is a regular texture, it is not a shader resource, and as a result it cannot be used in shader code. In my view, there are two ways to resolve your task:
Research "Walkthrough: Using MF to render video in a Direct3D app" - this link presents the way from "Walkthrough: Using Microsoft Media Foundation for Windows Phone 8" - from your code I see that you are trying to write a solution for Windows Store / UWP, and the code for Windows Phone is workable - this code needs MediaEnginePlayer - the MediaEnginePlayer class serves as a helper class that wraps the MF APIs;
Find Windows-classic-samples on GitHub and in it find DX11VideoRenderer - this is the full code of a Media Foundation renderer with DirectX11 - it includes a very good example of using the DirectX11 Video Processor, which blits the regular video texture from the decoder onto the rendering video texture of the swap chain:
2.1. Get rendering texture from Swap Chain:
// Get Backbuffer
hr = m_pSwapChain1->GetBuffer(0, __uuidof(ID3D11Texture2D), (void**)&pDXGIBackBuffer);
if (FAILED(hr))
{
break;
}
2.2. Create from rendering texture output view of video processor:
//
// Create Output View of Output Surfaces.
//
D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC OutputViewDesc;
ZeroMemory( &OutputViewDesc, sizeof( OutputViewDesc ) );
if (m_b3DVideo && m_bStereoEnabled)
{
OutputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2DARRAY;
}
else
{
OutputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D;
}
OutputViewDesc.Texture2D.MipSlice = 0;
OutputViewDesc.Texture2DArray.MipSlice = 0;
OutputViewDesc.Texture2DArray.FirstArraySlice = 0;
if (m_b3DVideo && 0 != m_vp3DOutput)
{
OutputViewDesc.Texture2DArray.ArraySize = 2; // STEREO
}
QueryPerformanceCounter(&lpcStart);
hr = m_pDX11VideoDevice->CreateVideoProcessorOutputView(pDXGIBackBuffer, m_pVideoProcessorEnum, &OutputViewDesc, &pOutputView);
2.3. Create from regular decoder video texture input view for video processor:
D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC InputLeftViewDesc;
ZeroMemory( &InputLeftViewDesc, sizeof( InputLeftViewDesc ) );
InputLeftViewDesc.FourCC = 0;
InputLeftViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
InputLeftViewDesc.Texture2D.MipSlice = 0;
InputLeftViewDesc.Texture2D.ArraySlice = dwLeftViewIndex;
hr = m_pDX11VideoDevice->CreateVideoProcessorInputView(pLeftTexture2D, m_pVideoProcessorEnum, &InputLeftViewDesc, &pLeftInputView);
if (FAILED(hr))
{
break;
}
2.4. Do blitting of regular decoder video texture on rendering texture from Swap Chain:
D3D11_VIDEO_PROCESSOR_STREAM StreamData;
ZeroMemory( &StreamData, sizeof( StreamData ) );
StreamData.Enable = TRUE;
StreamData.OutputIndex = 0;
StreamData.InputFrameOrField = 0;
StreamData.PastFrames = 0;
StreamData.FutureFrames = 0;
StreamData.ppPastSurfaces = NULL;
StreamData.ppFutureSurfaces = NULL;
StreamData.pInputSurface = pLeftInputView;
StreamData.ppPastSurfacesRight = NULL;
StreamData.ppFutureSurfacesRight = NULL;
if (m_b3DVideo && MFVideo3DSampleFormat_MultiView == m_vp3DOutput && pRightTexture2D)
{
StreamData.pInputSurfaceRight = pRightInputView;
}
hr = pVideoContext->VideoProcessorBlt(m_pVideoProcessor, pOutputView, 0, 1, &StreamData );
if (FAILED(hr))
{
break;
}
Yes, these are sections of complex code, and you will need to research the whole DX11VideoRenderer project to understand them - it will take a huge amount of time.
Regards,
Evgeny Pereguda
Debug output suggests that the texture is not compatible, as it was created without the D3D11_BIND_SHADER_RESOURCE flag (specified in the BindFlags field of the D3D11_TEXTURE2D_DESC structure).
You are reading a texture already created by a Media Foundation primitive. In some cases you can alter the creation flags, but in the general case you need to create a compatible texture of your own, copy the data between the textures, and then call the CreateShaderResourceView method with your texture as an argument rather than the original texture.
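A minimal sketch of that approach, reusing d3d11device and pSurface from the question plus a hypothetical immediate-context pointer d3d11context; note that decoded frames are often NV12 and may live in a texture array, in which case the SRV description and the copy need more care than shown here:
// Describe a texture identical to the decoder's, but shader-visible.
D3D11_TEXTURE2D_DESC desc = {};
pSurface->GetDesc(&desc);
desc.Usage = D3D11_USAGE_DEFAULT;
desc.BindFlags = D3D11_BIND_SHADER_RESOURCE; // the flag the original lacked
desc.CPUAccessFlags = 0;
desc.MiscFlags = 0;
ComPtr<ID3D11Texture2D> shaderTexture;
HRESULT hr = d3d11device->CreateTexture2D(&desc, nullptr, &shaderTexture);
if (SUCCEEDED(hr))
{
// Copy the decoded frame, then build the SRV from the copy.
d3d11context->CopyResource(shaderTexture.Get(), pSurface);
ComPtr<ID3D11ShaderResourceView> srv;
hr = d3d11device->CreateShaderResourceView(shaderTexture.Get(), nullptr, &srv);
}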

The function CreateWICTextureFromFile() will not actually load a texture (Direct3D11, C++)

I am trying to load a grass texture in my game with the DirectX::CreateWICTextureFromFile function, but every time I do, the function doesn't seem to actually load anything; it just produces a black texture. The function successfully returns S_OK, and I've also called CoInitialize(NULL) before I actually call the function. But it still doesn't work.
Down below is my usage of the function
// This is where I load the texture
void Load_Texture_for_Ground()
{
HRESULT status;
ID3D11ShaderResourceView * Texture;
CoInitialize(NULL);
status = DirectX::CreateWICTextureFromFile(device, L"AmazingGrass.jpg", NULL, &Texture);
if (Texture != NULL) // This returns true
{
MessageBox(MainWindow, L"The pointer points to the texture", L"MessageBox", MB_OK);
}
if (status == S_OK) //This returns true
{
MessageBox(MainWindow, L"The function succeeded", L"MessageBox", MB_OK);
}
CoUninitialize();
}
// This is where I actually load the texture onto an object, assuming I already declared all the variables in this function
void DrawTheGround ()
{
DevContext->VSSetShader(VS, 0, 0);
DevContext->PSSetShader(PS, 0, 0);
DevContext->IASetVertexBuffers(
0,
1,
&GroundVertexBuffer,
&strides,
&offset
);
DevContext->IASetIndexBuffer(
IndexBuffer,
DXGI_FORMAT_R32_UINT,
0
);
/* Transforming the matrices*/
TransformedMatrix = GroundWorld * CameraView * CameraProjection ;
Data.WORLDSPACE = XMMatrixTranspose(GroundWorld);
Data.TRANSFORMEDMATRIX = XMMatrixTranspose(TransformedMatrix);
/* Updating the matrix in application's Constant Buffer*/
DevContext->UpdateSubresource(
ConstantBuffer,
0,
NULL,
&Data,
0,
0
);
DevContext->VSSetConstantBuffers(0, 1, &ConstantBuffer);
DevContext->PSSetShaderResources(0, 1, &Texture);
DevContext->PSSetSamplers(0, 1, &TextureSamplerState);
DevContext->DrawIndexed(6, 0, 0);
}
What could be wrong here? Why won't the function load the texture?
A quick way to test whether you have loaded the texture data correctly is to use SaveWICTextureToFile from the ScreenGrab module right after loading it. You'd only do this for debugging, of course.
#include <wincodec.h>
#include <wrl/client.h>
using Microsoft::WRL::ComPtr;
ComPtr<ID3D11Resource> Res;
ComPtr<ID3D11ShaderResourceView> Texture;
HRESULT status = DirectX::CreateWICTextureFromFile(device, L"AmazingGrass.jpg", &Res, &Texture);
if (FAILED(status))
// Error handling
#ifdef _DEBUG
status = SaveWICTextureToFile( DevContext, Res.Get(),
GUID_ContainerFormatBmp, L"SCREENSHOT.BMP" );
#endif
Then you can run the code and check that SCREENSHOT.BMP is not all black.
I strongly suggest you adopt the ComPtr smart pointer and the FAILED / SUCCEEDED macros in your coding style. Raw pointers and directly comparing HRESULT to S_OK are setting yourself up for a lot of bugs.
You should not call CoInitialize every frame. Call it once as part of your application's initialization.
You should not be creating a new instance of SpriteBatch and SpriteFont every frame. Just create them after you create your device and hold on to them.
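A minimal sketch of that initialization-once pattern; the wWinMain shown is illustrative, not the poster's actual entry point:
#include <objbase.h>
#include <windows.h>
int WINAPI wWinMain(HINSTANCE, HINSTANCE, PWSTR, int)
{
// Initialize COM once, up front, instead of per texture load.
HRESULT hr = CoInitializeEx(nullptr, COINIT_MULTITHREADED);
if (FAILED(hr))
return -1;
// ... create the device, load textures, run the message loop ...
CoUninitialize(); // balance the single CoInitializeEx at shutdown
return 0;
}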

Unable to read depth buffer from Compute shader

I am unable to read the depth buffer from a compute shader.
I am using this in my HLSL code:
Texture2D<float4> gDepthTextures : register(t3);
// tried this.
//Texture2D<float> gDepthTextures : register(t3);
// and this.
//Texture2D<uint> gDepthTextures : register(t3);
// and this.
//Texture2D<uint4> gDepthTextures : register(t3);
And doing this to read the buffer.
outputTexture[dispatchThreadId.xy]=gDepthTextures.Load(int3(dispatchThreadId.xy,0));
And I am detaching the depth buffer from the render target:
ID3D11RenderTargetView *nullView[3]={NULL,NULL,NULL};
g_pImmediateContext->OMSetRenderTargets( 3, nullView, NULL );
Still, I am getting this error in the output:
D3D11 ERROR: ID3D11DeviceContext::Dispatch: The Shader Resource View dimension declared in the shader code (TEXTURE2D) does not match the view type bound to slot 3 of the Compute Shader unit (BUFFER). This mismatch is invalid if the shader actually uses the view (e.g. it is not skipped due to shader code branching). [ EXECUTION ERROR #354: DEVICE_DRAW_VIEW_DIMENSION_MISMATCH]
This is how I am creating shader resource view.
// Create depth stencil texture
D3D11_TEXTURE2D_DESC descDepth;
ZeroMemory( &descDepth, sizeof(descDepth) );
descDepth.Width = width;
descDepth.Height = height;
descDepth.MipLevels = 1;
descDepth.ArraySize = 1;
descDepth.Format = DXGI_FORMAT_R32_TYPELESS;
descDepth.SampleDesc.Count = 1;
descDepth.SampleDesc.Quality = 0;
descDepth.Usage = D3D11_USAGE_DEFAULT;
descDepth.BindFlags = D3D11_BIND_DEPTH_STENCIL | D3D11_BIND_SHADER_RESOURCE;
descDepth.CPUAccessFlags = 0;
descDepth.MiscFlags = 0;
hr = g_pd3dDevice->CreateTexture2D( &descDepth, NULL, &g_pDepthStencil );
if( FAILED( hr ) )
return hr;
// Create the depth stencil view
D3D11_DEPTH_STENCIL_VIEW_DESC descDSV;
ZeroMemory( &descDSV, sizeof(descDSV) );
descDSV.Format = DXGI_FORMAT_D32_FLOAT;
descDSV.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
descDSV.Texture2D.MipSlice = 0;
hr = g_pd3dDevice->CreateDepthStencilView( g_pDepthStencil, &descDSV, &g_pDepthStencilView );
if( FAILED( hr ) )
return hr;
// Create depth shader resource view.
D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc;
ZeroMemory(&srvDesc,sizeof(D3D11_SHADER_RESOURCE_VIEW_DESC));
srvDesc.Format=DXGI_FORMAT_R32_UINT;
srvDesc.ViewDimension=D3D11_SRV_DIMENSION_TEXTURE2D;
srvDesc.Texture2D.MostDetailedMip=0;
srvDesc.Texture2D.MipLevels=1;
hr=g_pd3dDevice->CreateShaderResourceView(g_pDepthStencil,&srvDesc,&g_pDepthSRV);
if(FAILED(hr))
return hr;
I have tried all the formats mentioned here in combination with the HLSL texture formats float, float4, uint, and uint4, with no success. Any idea?
Replace DXGI_FORMAT_R32_UINT with DXGI_FORMAT_R32_FLOAT for your shader resource view; since you use R32_TYPELESS for the texture, you have a floating-point buffer.
Texture2D<float> gDepthTextures will then be the one you need to load or sample the depth from later.
Also, it looks like your texture is not bound properly to your compute shader (since the runtime tells you a buffer is bound there).
Make sure you have:
immediateContext->CSSetShaderResources(3, 1, &g_pDepthSRV);
Called before your dispatch.
As a side note, to debug this type of issue you can also call CSGetShaderResources (and the other stage equivalents) to check what is actually bound in your pipeline before the call.
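For instance, a minimal sketch of that check using the globals from the question:
// Read back what is actually bound to compute-shader slot t3 before Dispatch.
ID3D11ShaderResourceView* pBound = nullptr;
g_pImmediateContext->CSGetShaderResources(3, 1, &pBound);
if (pBound == nullptr)
{
OutputDebugStringA("Nothing bound to CS slot 3\n");
}
else
{
D3D11_SHADER_RESOURCE_VIEW_DESC desc;
pBound->GetDesc(&desc);
// Expect D3D11_SRV_DIMENSION_TEXTURE2D here, not D3D11_SRV_DIMENSION_BUFFER.
pBound->Release(); // CSGetShaderResources adds a reference we must drop
}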