I have a 2D diffuse texture loaded into memory and want to create a DX11 texture from it. It does not need to be altered, so I made it immutable.
DX11Texture::DX11Texture(ID3D11Device* device, const std::vector<uint8_t>& textureData, uint32_t textureWidth, uint32_t textureHeight, TextureType textureType, Logger& logger) :
    mTexture(nullptr), mTextureID(gNextTextureID++)
{
    D3D11_TEXTURE2D_DESC textureDesc;
    ZeroMemory(&textureDesc, sizeof(D3D11_TEXTURE2D_DESC));
    textureDesc.Width = textureWidth;
    textureDesc.Height = textureHeight;
    textureDesc.MipLevels = 1;
    textureDesc.ArraySize = 1;
    textureDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
    textureDesc.SampleDesc.Count = 1;
    textureDesc.Usage = D3D11_USAGE_IMMUTABLE;
    textureDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE;

    D3D11_SUBRESOURCE_DATA initData;
    ZeroMemory(&initData, sizeof(D3D11_SUBRESOURCE_DATA));
    initData.pSysMem = &textureData.at(0);

    DXCALL(device->CreateTexture2D(&textureDesc, &initData, &mTexture)); // throws E_INVALIDARG result
}
I don't see why this causes a faulty result from the CreateTexture2D call. For example, a 128x128 R8G8B8A8 texture throws the bad result. Any ideas why?
You should probably also set the SysMemPitch member of initData, as explained on MSDN under D3D11_SUBRESOURCE_DATA. Also keep in mind that CreateTexture2D expects a pointer to an array of D3D11_SUBRESOURCE_DATA structures (one for each mip level).
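As a minimal sketch of the corrected initialization, assuming the data is tightly packed RGBA8 (4 bytes per texel):

// SysMemPitch is the distance in bytes between the start of each row.
// For tightly packed DXGI_FORMAT_R8G8B8A8_UNORM data that is width * 4.
D3D11_SUBRESOURCE_DATA initData;
ZeroMemory(&initData, sizeof(D3D11_SUBRESOURCE_DATA));
initData.pSysMem = textureData.data();
initData.SysMemPitch = textureWidth * 4;
initData.SysMemSlicePitch = 0; // only used for 3D textures
DXCALL(device->CreateTexture2D(&textureDesc, &initData, &mTexture));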
I'm trying to create a game view for my game engine using the ImGui framework. However, when I try to render to a texture, only the clear color is displayed on the screen.
My plan
1. Create a render target texture (I think I did this part right).
2. Render my scene into the target texture (I presume this is where it fails: only the clear color is displayed, and my test triangle is not).
3. Create a shader resource view (I tested this and the next step by loading an image from disk; it shows no problem there).
4. Display it with ImGui::Image().
With the render texture bound, only the clear color and the ImGui editor elements are displayed on screen; when I switch back to normal rendering, my test triangle appears.
My Render Texture Creation
D3D11_TEXTURE2D_DESC textureDesc;
D3D11_RENDER_TARGET_VIEW_DESC renderTargetViewDesc;
D3D11_SHADER_RESOURCE_VIEW_DESC shaderResourceViewDesc;
ZeroMemory(&textureDesc, sizeof(textureDesc));
textureDesc.Width = 1920;
textureDesc.Height = 1080;
textureDesc.MipLevels = 1;
textureDesc.ArraySize = 1;
textureDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
textureDesc.SampleDesc.Count = 1;
textureDesc.Usage = D3D11_USAGE_DEFAULT;
textureDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
textureDesc.CPUAccessFlags = 0;
textureDesc.MiscFlags = 0;
textureDesc.SampleDesc.Quality = 0;
renderTargetViewDesc.Format = textureDesc.Format;
renderTargetViewDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
renderTargetViewDesc.Texture2D.MipSlice = 0;
DLE_Graphics::CurrentGraphics->pDevice->CreateTexture2D(&textureDesc, NULL, pTargetTexture.GetAddressOf());
shaderResourceViewDesc.Format = textureDesc.Format;
shaderResourceViewDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
shaderResourceViewDesc.Texture2D.MostDetailedMip = 0;
shaderResourceViewDesc.Texture2D.MipLevels = 1;
DLE_Graphics::CurrentGraphics->pDevice->CreateShaderResourceView(this->pTargetTexture.Get(), &shaderResourceViewDesc, this->pResourceView.GetAddressOf());
// (re)-create the render target view
DLE_Graphics::CurrentGraphics->pSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), reinterpret_cast<void**>(pTargetTexture.GetAddressOf()));
DLE_Graphics::CurrentGraphics->pDevice->CreateRenderTargetView(pTargetTexture.Get(), &renderTargetViewDesc, pRenderTargetView.GetAddressOf());
// create the depth and stencil buffer
D3D11_TEXTURE2D_DESC dsd;
pTargetTexture->GetDesc(&dsd);
dsd.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
dsd.Usage = D3D11_USAGE_DEFAULT;
dsd.BindFlags = D3D11_BIND_DEPTH_STENCIL;
DLE_Graphics::CurrentGraphics->pDevice->CreateTexture2D(&dsd, NULL, pDepthTexture.GetAddressOf());
DLE_Graphics::CurrentGraphics->pDevice->CreateDepthStencilView(pDepthTexture.Get(), NULL, pDepthView.GetAddressOf());
// activate the depth and stencil buffer
DLE_Graphics::CurrentGraphics->pContext->OMSetRenderTargets(1, pRenderTargetView.GetAddressOf(), pDepthView.Get());
My Rendering Code
const FLOAT clr[] = { 0.0f, 0.5f, 0.5f, 0.0f };
pContext->ClearRenderTargetView(pRenderTexture->GetRenderView().Get(), clr);
pContext->ClearDepthStencilView(pRenderTexture->GetDepthView().Get(), D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 0u, 0u);
pContext->OMSetRenderTargets(1u, pRenderTexture->GetRenderView().GetAddressOf(), pRenderTexture->GetDepthView().Get());
pContext->DrawIndexed(size, 0u, 0u);
pSwapChain->Present(0u, 0u);
If anyone is curious, this is my swap chain description:
DXGI_SWAP_CHAIN_DESC description = {};
description.BufferDesc.Width = 1920;
description.BufferDesc.Height = 1080;
description.BufferDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
description.BufferDesc.RefreshRate.Numerator = 60;
description.BufferDesc.RefreshRate.Denominator = 1;
description.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;
description.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
description.SampleDesc.Count = 1;
description.SampleDesc.Quality = 0;
description.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT ;
description.BufferCount = 1;
description.OutputWindow = hWnd;
description.Windowed = TRUE;
description.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
description.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH;
When I use my render texture's resource view with ImGui::Image, no image shows up, not even the clear color. If I instead use my image loaded from disk with ImGui::Image, it displays fine. I don't know what I'm doing wrong here.
Here's my ImGui code:
ImGui::Begin("Spectrum Observer");
ImVec2 pos = ImGui::GetCursorScreenPos();
//ImGui::Image(texture->GetResource(), ImVec2(512, 512)); // my image from the disk
ImGui::Image(pGraphics->pRenderTexture->GetResourceView().Get(), ImVec2(512, 512)); // render texture
ImGui::End();
ImGui::Render();
ImGui_ImplDX11_RenderDrawData(ImGui::GetDrawData());
I appreciate any input, thank you.
I managed to solve the triangle rendering problem by changing the depth clear value in ClearDepthStencilView() from 0 to 1:
const FLOAT clr[] = { 0.0f, 0.5f, 0.5f, 0.0f };
pContext->ClearRenderTargetView(pRenderTexture->GetRenderView().Get(), clr);
pContext->ClearDepthStencilView(pRenderTexture->GetDepthView().Get(), D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1u, 0u);
pContext->OMSetRenderTargets(1u, pRenderTexture->GetRenderView().GetAddressOf(), pRenderTexture->GetDepthView().Get());
pContext->DrawIndexed(size, 0u, 0u);
pSwapChain->Present(0u, 0u);
Apparently I was clearing the depth buffer to 0, which marks every pixel as the nearest possible depth value, so everything fails the z-test.
I'm trying to write to a texture from a compute shader in HLSL.
Creation of the texture:
D3D11_TEXTURE2D_DESC textureDesc;
ZeroMemory(&textureDesc, sizeof(textureDesc));
textureDesc.Width = 512;
textureDesc.Height = 512;
textureDesc.MipLevels = 1;
textureDesc.ArraySize = 1;
textureDesc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
textureDesc.SampleDesc.Count = 1;
textureDesc.SampleDesc.Quality = 0;
textureDesc.Usage = D3D11_USAGE_DEFAULT;
textureDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_UNORDERED_ACCESS;
textureDesc.CPUAccessFlags = 0;
textureDesc.MiscFlags = 0;
m_tex = 0;
hr = device->CreateTexture2D(&textureDesc, 0, &m_tex);
Creation of the UAV:
D3D11_UNORDERED_ACCESS_VIEW_DESC descUAV;
ZeroMemory(&descUAV, sizeof(descUAV));
descUAV.Format = DXGI_FORMAT_UNKNOWN;
descUAV.ViewDimension = D3D11_UAV_DIMENSION_TEXTURE2D;
descUAV.Texture2D.MipSlice = 0;
hr = device->CreateUnorderedAccessView(m_tex, &descUAV, &m_uavAccess);
Creation of the SRV (to see the texture):
D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc;
srvDesc.Format = textureDesc.Format;
srvDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
srvDesc.Texture2D.MostDetailedMip = 0;
srvDesc.Texture2D.MipLevels = 1;
hr = device->CreateShaderResourceView(m_tex, &srvDesc, &m_srvTexOutput);
The shader:
RWTexture2D<float4> gOutput : register(u0);
[numthreads(16, 16, 1)]
void main(int3 dispatchThreadID : SV_DispatchThreadID) // Thread ID
{
    gOutput[dispatchThreadID.xy] = float4(0.0f, 1.0f, 0.0f, 1.0f);
}
The problem is that the texture is always black (the compute shader doesn't write to the texture).
Thanks for your help! :D
The problem was that I had bound a UAV and an SRV to the same texture at the same time.
I resolved the problem by creating two textures: one bound to a UAV and the other bound to an SRV.
The compute shader writes to the UAV texture and, each frame, I copy the data to the SRV texture with deviceContext->CopyResource(m_texSRV, m_texUAV);
The problem now is performance: CopyResource is a costly operation. Is there a less expensive solution?
Thanks for your help! :D
The issue is that you need to cause the GPU to flush the surface before using it as an input in a different shader invocation (draw/PS). You do this by binding a null resource to the previous shader stage.
ID3D11UnorderedAccessView *NullUav = nullptr;
m_DeviceContext->CSSetUnorderedAccessViews(0, 1, &NullUav, nullptr);
This should be enough to tell the graphics driver to flush, or to transition the resource from a write resource to a read resource.
If you use D3D12 or Vulkan, you will need a resource barrier when going from write to read.
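For comparison, here is a minimal sketch of such a transition barrier in D3D12 (the texture and commandList variables are hypothetical, not part of the D3D11 code in this answer):

// Transition the resource from UAV writes to pixel-shader reads.
D3D12_RESOURCE_BARRIER barrier = {};
barrier.Type = D3D12_RESOURCE_BARRIER_TYPE_TRANSITION;
barrier.Transition.pResource = texture;
barrier.Transition.Subresource = D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES;
barrier.Transition.StateBefore = D3D12_RESOURCE_STATE_UNORDERED_ACCESS;
barrier.Transition.StateAfter = D3D12_RESOURCE_STATE_PIXEL_SHADER_RESOURCE;
commandList->ResourceBarrier(1, &barrier);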
Example render loop:
// Run compute kernel output to UAV.
m_DeviceContext->CSSetShader(m_ComputeShader, NULL, 0);
m_DeviceContext->CSSetUnorderedAccessViews(0, 1, &m_UAV, nullptr);
m_DeviceContext->Dispatch(DispatchX, DispatchY, DispatchZ);
// Run the raster pipeline to present the UAV values to screen.
ID3D11UnorderedAccessView *NullUav = nullptr;
ID3D11ShaderResourceView* NullSrv = nullptr;
ID3D11DepthStencilView *NullDsv = nullptr;
m_DeviceContext->PSSetShader(m_PixelShader, NULL, 0);
m_DeviceContext->VSSetShader(m_VertexShader, NULL, 0);
m_DeviceContext->CSSetUnorderedAccessViews(0, 1, &NullUav, nullptr);
m_DeviceContext->PSSetShaderResources(0, 1, &m_SRV);
m_DeviceContext->IASetIndexBuffer(m_IndexBuffer, DXGI_FORMAT_R32_UINT, 0);
m_DeviceContext->PSSetSamplers(0, 1, &m_SamplerState);
m_DeviceContext->OMSetRenderTargets(1, &m_RenderTarget, NullDsv);
m_ViewPort.Width = (float)RTWidth;
m_ViewPort.Height = (float)RTHeight;
m_ViewPort.MaxDepth = 1.0f;
m_DeviceContext->RSSetViewports(1, &m_ViewPort);
m_DeviceContext->DrawIndexed(4, 0, 0);
// Present the final result.
HRESULT Result = m_SwapChain->Present(1, 0);
if (Result == DXGI_ERROR_DEVICE_REMOVED || Result == DXGI_ERROR_DEVICE_RESET) {
    // Reinitialize the renderer...
}
// Unset the SRV
m_DeviceContext->PSSetShaderResources(0, 1, &NullSrv);
I'm porting my OpenGL renderer to DirectX. Almost everything works fine, but I have a problem with depth: everything in 3D renders flat. I've checked the Visual Studio Graphics Analyzer, and the depth buffer and state are bound. If I inspect the depth/stencil texture resource, it contains only the values 0.0f (where something is on screen) and 1.0f (where nothing is rendered). The OpenGL renderer, checked with gDEBugger, shows values from 0.0f to 1.0f, and OpenGL works. I've also checked error codes; everything returns S_OK. What could I be doing wrong?
Here is my DepthStencilState creation:
D3D11_DEPTH_STENCIL_DESC dsDescEn;
ZeroMemory(&dsDescEn, sizeof(D3D11_DEPTH_STENCIL_DESC));
dsDescEn.DepthEnable = TRUE;
dsDescEn.DepthWriteMask = D3D11_DEPTH_WRITE_MASK_ALL;
dsDescEn.DepthFunc = D3D11_COMPARISON_LESS;
dsDescEn.StencilEnable = TRUE;
dsDescEn.StencilReadMask = 0xFF;
dsDescEn.StencilWriteMask = 0xFF;
dsDescEn.FrontFace.StencilFailOp = D3D11_STENCIL_OP_KEEP;
dsDescEn.FrontFace.StencilDepthFailOp = D3D11_STENCIL_OP_INCR;
dsDescEn.FrontFace.StencilPassOp = D3D11_STENCIL_OP_KEEP;
dsDescEn.FrontFace.StencilFunc = D3D11_COMPARISON_ALWAYS;
dsDescEn.BackFace.StencilFailOp = D3D11_STENCIL_OP_KEEP;
dsDescEn.BackFace.StencilDepthFailOp = D3D11_STENCIL_OP_DECR;
dsDescEn.BackFace.StencilPassOp = D3D11_STENCIL_OP_KEEP;
dsDescEn.BackFace.StencilFunc = D3D11_COMPARISON_ALWAYS;
DirectX11Device::getDevice().getDevicePtr()->CreateDepthStencilState(&dsDescEn, &m_dSStateEn);
I'm binding the state via OMSetDepthStencilState. Here is the creation of the depth/stencil buffer and view:
D3D11_TEXTURE2D_DESC depthStencilDesc;
depthStencilDesc.Width = width;
depthStencilDesc.Height = height;
depthStencilDesc.MipLevels = 1;
depthStencilDesc.ArraySize = 1;
depthStencilDesc.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
depthStencilDesc.SampleDesc.Count = 1;
depthStencilDesc.SampleDesc.Quality = 0;
depthStencilDesc.Usage = D3D11_USAGE_DEFAULT;
depthStencilDesc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
depthStencilDesc.CPUAccessFlags = 0;
depthStencilDesc.MiscFlags = 0;
D3D11_DEPTH_STENCIL_VIEW_DESC depthStencilViewDesc;
ZeroMemory(&depthStencilViewDesc, sizeof(D3D11_DEPTH_STENCIL_VIEW_DESC));
depthStencilViewDesc.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
depthStencilViewDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
depthStencilViewDesc.Texture2D.MipSlice = 0;
DirectX11Device::getDevice().getDevicePtr()->CreateTexture2D(&depthStencilDesc, NULL, &m_stencilDepthBuff);
DirectX11Device::getDevice().getDevicePtr()->CreateDepthStencilView(m_stencilDepthBuff, &depthStencilViewDesc, &m_stencilDepthView);
I've found a solution. The problem was the matrix multiplication: I moved it from C++ into the shader, and now it works without problems.
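A plausible explanation (my assumption, not confirmed above) is a row-major/column-major mismatch: HLSL reads constant-buffer matrices as column-major by default, while DirectXMath on the CPU is row-major, so matrices multiplied on the CPU need a transpose before upload. A minimal sketch, assuming a cbData struct and constantBuffer that mirror the shader's constant buffer:

using namespace DirectX;
// Multiply on the CPU (row-major convention)...
XMMATRIX mvp = world * view * projection;
// ...then transpose so HLSL's default column-major layout reads it correctly.
XMStoreFloat4x4(&cbData.mvp, XMMatrixTranspose(mvp));
context->UpdateSubresource(constantBuffer, 0, nullptr, &cbData, 0, 0);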
I'm trying to render the scene to a texture, which should then be displayed in a corner of the screen.
I thought I could do it this way:
1. Render the scene (my Engine::render() method that sets shaders and makes the draw calls) - works OK.
2. Change the render target to the texture.
3. Render the scene again - does not work. context->ClearRenderTargetView(texture->getRenderTargetView(), { 1.0f, 0.0f, 0.0f, 1.0f }) does set my texture to red (for the scene in step 1 I use a different color), but no objects are rendered onto it.
4. Change the render target back to the original.
5. Render the scene for the last time, with a rectangle in the corner that uses the texture rendered in step 3 - works OK. I see the scene, and the little rectangle in the corner too. The problem is, it's just red (something went wrong with the rendering in step 3, I guess).
The result (there should be an "image in image" instead of the red rectangle):
The code for steps 2-4:
context->OMSetRenderTargets(1, &textureRenderTargetView, depthStencilView);
float bg[4] = { 1.0f, 0.0f, 0.0f, 1.0f };
context->ClearRenderTargetView(textureRenderTargetView, bg); //backgroundColor - red, green, blue, alpha
render();
context->OMSetRenderTargets(1, &myRenderTargetView, depthStencilView); //bind render target back to previous value (not to texture)
The render() method does not change (it works in step 1, so why doesn't it work when I render to the texture?) and ends with swapChain->Present(0, 0).
I know that ClearRenderTargetView affects my texture (without it, the texture doesn't change to red), but the rest of the rendering either does not output to it, or there's another problem.
Did I miss something?
I create the texture, shader resource view and render target for it based on this tutorial (maybe there is an error in my D3D11_TEXTURE2D_DESC?):
D3D11_TEXTURE2D_DESC textureDesc;
D3D11_RENDER_TARGET_VIEW_DESC renderTargetViewDesc;
D3D11_SHADER_RESOURCE_VIEW_DESC shaderResourceViewDesc;
//1. create render target
ZeroMemory(&textureDesc, sizeof(textureDesc));
//setup the texture description
//we will need to have this texture bound as a render target AND a shader resource
textureDesc.Width = size.getX();
textureDesc.Height = size.getY();
textureDesc.MipLevels = 1;
textureDesc.ArraySize = 1;
textureDesc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
textureDesc.SampleDesc.Count = 1;
textureDesc.Usage = D3D11_USAGE_DEFAULT;
textureDesc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
textureDesc.CPUAccessFlags = 0;
textureDesc.MiscFlags = 0;
//create the texture
device->CreateTexture2D(&textureDesc, NULL, &textureRenderTarget);
//2. create render target view
//setup the description of the render target view.
renderTargetViewDesc.Format = textureDesc.Format;
renderTargetViewDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
renderTargetViewDesc.Texture2D.MipSlice = 0;
//create the render target view
device->CreateRenderTargetView(textureRenderTarget, &renderTargetViewDesc, &textureRenderTargetView);
//3. create shader resource view
//setup the description of the shader resource view.
shaderResourceViewDesc.Format = textureDesc.Format;
shaderResourceViewDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
shaderResourceViewDesc.Texture2D.MostDetailedMip = 0;
shaderResourceViewDesc.Texture2D.MipLevels = 1;
//create the shader resource view.
device->CreateShaderResourceView(textureRenderTarget, &shaderResourceViewDesc, &texture);
The depth buffer:
D3D11_TEXTURE2D_DESC descDepth;
ZeroMemory(&descDepth, sizeof(descDepth));
descDepth.Width = width;
descDepth.Height = height;
descDepth.MipLevels = 1;
descDepth.ArraySize = 1;
descDepth.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
descDepth.SampleDesc.Count = sampleCount;
descDepth.SampleDesc.Quality = maxQualityLevel;
descDepth.Usage = D3D11_USAGE_DEFAULT;
descDepth.BindFlags = D3D11_BIND_DEPTH_STENCIL;
descDepth.CPUAccessFlags = 0;
descDepth.MiscFlags = 0;
And here goes the swap chain:
DXGI_SWAP_CHAIN_DESC sd;
ZeroMemory(&sd, sizeof(sd));
sd.BufferCount = 1;
sd.BufferDesc.Width = width;
sd.BufferDesc.Height = height;
sd.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
sd.BufferDesc.RefreshRate.Numerator = numerator; //60
sd.BufferDesc.RefreshRate.Denominator = denominator; //1
sd.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
sd.OutputWindow = *hwnd;
sd.SampleDesc.Count = sampleCount; //1 (and 0 for quality) to turn off multisampling
sd.SampleDesc.Quality = maxQualityLevel;
sd.Windowed = fullScreen ? FALSE : TRUE;
sd.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH; //allow full-screen switching
// Set the scan line ordering and scaling to unspecified.
sd.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
sd.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;
// Discard the back buffer contents after presenting.
sd.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
I create the default render target view this way:
//create a render target view
ID3D11Texture2D* pBackBuffer = NULL;
result = swapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (LPVOID*)&pBackBuffer);
ERROR_HANDLE(SUCCEEDED(result), L"The swapChain->GetBuffer() failed.", MOD_GRAPHIC);
//Create the render target view with the back buffer pointer.
result = device->CreateRenderTargetView(pBackBuffer, NULL, &myRenderTargetView);
After some debugging, as @Gnietschow suggested, I found an error:
D3D11 ERROR: ID3D11DeviceContext::OMSetRenderTargets: The RenderTargetView at slot 0 is not compatable with the DepthStencilView. DepthStencilViews may only be used with RenderTargetViews if the effective dimensions of the Views are equal, as well as the Resource types, multisample count, and multisample quality.
The RenderTargetView at slot 0 has (w:1680,h:1050,as:1), while the Resource is a Texture2D with (mc:1,mq:0).
The DepthStencilView has (w:1680,h:1050,as:1), while the Resource is a Texture2D with (mc:8,mq:16).
So basically, my render target texture was not using anti-aliasing while my back buffer and depth buffer were.
I had to change SampleDesc.Count to 1 and SampleDesc.Quality to 0 in both the DXGI_SWAP_CHAIN_DESC and the D3D11_TEXTURE2D_DESC so that they match the texture I render to. In other words, I had to turn off anti-aliasing when rendering to the texture.
I wonder why render-to-texture does not support anti-aliasing here. When I set SampleDesc.Count and SampleDesc.Quality to my usual values (8 and 16, which work fine on my GPU when rendering the scene) for the texture render target, device->CreateTexture2D(...) fails with "invalid parameter" (even when I use those same values everywhere).
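One possible reason (my assumption, not verified on the poster's hardware): supported multisample quality levels are queried per format, and DXGI_FORMAT_R32G32B32A32_FLOAT often supports fewer MSAA levels than a typical back-buffer format, so values the swap chain accepts can be invalid for this texture. A quick check could look like this:

// Valid Quality values for a given format and sample count are 0 .. numLevels - 1.
UINT numLevels = 0;
device->CheckMultisampleQualityLevels(DXGI_FORMAT_R32G32B32A32_FLOAT, 8, &numLevels);
if (numLevels == 0)
{
    // 8x MSAA is not supported for this format; fall back to SampleDesc.Count = 1.
}

Note also that a multisampled render target needs the D3D11_RTV_DIMENSION_TEXTURE2DMS and D3D11_SRV_DIMENSION_TEXTURE2DMS view dimensions, and has to be resolved with ResolveSubresource before it can be sampled like a regular texture.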
I am trying to render my scene to a texture to use it in shaders for a post-processing effect. I am not very experienced with this kind of thing, so I have some questions.
First of all, I have this code in my OnCreateDevice() method:
D3D11_TEXTURE2D_DESC textureDesc;
D3D11_RENDER_TARGET_VIEW_DESC renderTargetViewDesc;
D3D11_SHADER_RESOURCE_VIEW_DESC shaderResourceViewDesc;
ZeroMemory(&textureDesc, sizeof(textureDesc));
textureDesc.Width = 800/2;
textureDesc.Height = 600/2;
textureDesc.MipLevels = 1;
textureDesc.ArraySize = 1;
textureDesc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
textureDesc.SampleDesc.Count = 1;
textureDesc.Usage = D3D11_USAGE_DEFAULT;
textureDesc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
textureDesc.CPUAccessFlags = 0;
textureDesc.MiscFlags = 0;
pd3dDevice->CreateTexture2D(&textureDesc, NULL, &renderTargetTextureMap);
renderTargetViewDesc.Format = textureDesc.Format;
renderTargetViewDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
renderTargetViewDesc.Texture2D.MipSlice = 0;
pd3dDevice->CreateRenderTargetView(renderTargetTextureMap, &renderTargetViewDesc, &renderTargetViewMap);
shaderResourceViewDesc.Format = textureDesc.Format;
shaderResourceViewDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
shaderResourceViewDesc.Texture2D.MostDetailedMip = 0;
shaderResourceViewDesc.Texture2D.MipLevels = 1;
pd3dDevice->CreateShaderResourceView(renderTargetTextureMap, &shaderResourceViewDesc, &shaderResourceViewMap);
I am creating a texture, a render target view and a shader resource view. But where do I go from here? Do I have to define another buffer, or does this work as is?
Specifically: I think I have to call OMSetRenderTargets with renderTargetViewMap and depthStencilView, but where do I have to set this? In every frame's render call? Before or after I render the other objects in my scene? And my guess is that I also have to write a new shader that is given this texture to work with.
Thanks for any insight :/
You will have to set it each time you want to render to the texture, yes (and it stays active as long as you haven't switched to another render target or back to the back buffer). Once you've rendered the data to the texture, set up the shader that processes this data and draw a screen-aligned quad with this texture.
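Purely as an illustration, a rough per-frame sketch of that order of operations, using the names from the question plus hypothetical helpers (renderScene, drawFullscreenQuad and backBufferRTV are placeholders, not code from this thread):

// 1. Render the scene into the offscreen texture.
context->OMSetRenderTargets(1, &renderTargetViewMap, depthStencilView);
context->ClearRenderTargetView(renderTargetViewMap, clearColor);
context->ClearDepthStencilView(depthStencilView, D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0f, 0);
renderScene(); // normal scene shaders and draw calls

// 2. Switch back to the back buffer and feed the texture to the post-processing shader.
context->OMSetRenderTargets(1, &backBufferRTV, nullptr);
context->PSSetShaderResources(0, 1, &shaderResourceViewMap);
drawFullscreenQuad(); // screen-aligned quad running the post-processing shader

// 3. Unbind the SRV so the texture can be a render target again next frame.
ID3D11ShaderResourceView* nullSRV = nullptr;
context->PSSetShaderResources(0, 1, &nullSRV);

swapChain->Present(0, 0);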