DirectX cuts vertices and only draws the last call - C++

This is what happens in DirectX:
It should display 5 of those birds, but it only draws one (the last one), and not correctly either.
And this is how it should really look (same buffers etc., but rendered in OpenGL):
Any idea what could cause the problem?
My calls are:
Initialize:
// Obtain the DXGI factory that created the device
this->device->QueryInterface(__uuidof(IDXGIDevice), (LPVOID*)&dxgiDevice);
dxgiDevice->GetAdapter(&adapter);
adapter->GetParent(IID_PPV_ARGS(&factory));
// Describe and create the swap chain
ZeroMemory(&swapChainDescription, sizeof(swapChainDescription));
swapChainDescription.BufferDesc.Width = this->description.ResolutionWidth;
swapChainDescription.BufferDesc.Height = this->description.ResolutionHeight;
swapChainDescription.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
swapChainDescription.BufferDesc.RefreshRate.Numerator = 60;
swapChainDescription.BufferDesc.RefreshRate.Denominator = 1;
swapChainDescription.BufferDesc.Scaling = DXGI_MODE_SCALING_STRETCHED;
swapChainDescription.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_PROGRESSIVE;
swapChainDescription.SampleDesc.Count = 1;
swapChainDescription.SampleDesc.Quality = 0;
swapChainDescription.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
swapChainDescription.BufferCount = 1;
swapChainDescription.OutputWindow = this->hwnd;
swapChainDescription.Windowed = !this->window->GetDescription().Fullscreen;
swapChainDescription.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
swapChainDescription.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH;
factory->CreateSwapChain(this->device, &swapChainDescription, &this->swapChain);
// Create a render target view of the back buffer
this->swapChain->GetBuffer(0, __uuidof(ID3D10Texture2D), (LPVOID*)&backBuffer);
this->device->CreateRenderTargetView(backBuffer, NULL, &this->backBufferView);
backBuffer->Release();
// Describe and create the depth/stencil buffer and its view
ZeroMemory(&depthStencilDescription, sizeof(depthStencilDescription));
depthStencilDescription.Width = this->description.ResolutionWidth;
depthStencilDescription.Height = this->description.ResolutionHeight;
depthStencilDescription.MipLevels = 1;
depthStencilDescription.ArraySize = 1;
depthStencilDescription.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
depthStencilDescription.SampleDesc.Count = 1;
depthStencilDescription.SampleDesc.Quality = 0;
depthStencilDescription.Usage = D3D10_USAGE_DEFAULT;
depthStencilDescription.BindFlags = D3D10_BIND_DEPTH_STENCIL;
depthStencilDescription.CPUAccessFlags = 0;
depthStencilDescription.MiscFlags = 0;
this->device->CreateTexture2D(&depthStencilDescription, 0, &depthStencilBuffer);
this->device->CreateDepthStencilView(depthStencilBuffer, 0, &this->depthStencilBufferView);
depthStencilBuffer->Release();
// Viewport covering the full render target
viewPort.Width = this->description.ResolutionWidth;
viewPort.Height = this->description.ResolutionHeight;
viewPort.MinDepth = 0.0f;
viewPort.MaxDepth = 1.0f;
viewPort.TopLeftX = 0;
viewPort.TopLeftY = 0;
this->device->RSSetViewports(1, &viewPort);
// Describe and create a standard alpha blend state
D3D10_BLEND_DESC BlendState;
ZeroMemory(&BlendState, sizeof(D3D10_BLEND_DESC));
BlendState.AlphaToCoverageEnable = FALSE;
BlendState.BlendEnable[0] = TRUE;
BlendState.SrcBlend = D3D10_BLEND_SRC_ALPHA;
BlendState.DestBlend = D3D10_BLEND_INV_SRC_ALPHA;
BlendState.BlendOp = D3D10_BLEND_OP_ADD;
BlendState.SrcBlendAlpha = D3D10_BLEND_ZERO;
BlendState.DestBlendAlpha = D3D10_BLEND_ZERO;
BlendState.BlendOpAlpha = D3D10_BLEND_OP_ADD;
BlendState.RenderTargetWriteMask[0] = D3D10_COLOR_WRITE_ENABLE_ALL;
this->device->CreateBlendState(&BlendState, &this->blendState);
Each frame (broken down):
this->device->OMSetBlendState(this->blendState, 0, 0xffffffff);
this->device->OMSetRenderTargets(1, &this->backBufferView, this->depthStencilBufferView);
this->device->ClearRenderTargetView(this->backBufferView, &Color.colors[0]);
this->device->ClearDepthStencilView(this->depthStencilBufferView, D3D10_CLEAR_DEPTH | D3D10_CLEAR_STENCIL, 1.0f, 0);
In a for loop (5 times):
// Bind the vertex buffer for this model
ID3D10Buffer* vertexBuff = (ID3D10Buffer*)vertexBuffer->GetData();
UINT stride = vertexBuffer->GetStride();
UINT offset = 0;
this->device->IASetVertexBuffers(0, 1, &vertexBuff, &stride, &offset);
// Bind the index buffer for this model
ID3D10Buffer* indexBuff = (ID3D10Buffer*)indexBuffer->GetData();
this->device->IASetIndexBuffer(indexBuff, DXGI_FORMAT_R32_UINT, 0);
this->device->IASetPrimitiveTopology(D3D10_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
this->device->DrawIndexed(Indexes, 0, 0);
At the end:
this->swapChain->Present(0, 0);

I got it fixed; here it is for others:
Vertices getting cut:
My near/far planes were too close to each other (they were 0.1f and 100.0f). I changed both values and now it works.
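For illustration only, this is where those planes go; the field of view, aspect ratio, and the 1.0f/1000.0f planes below are placeholders, not the exact numbers from my project:
// Placeholder values: the near/far planes are the last two arguments.
D3DXMATRIX projection;
D3DXMatrixPerspectiveFovLH(&projection,
    (float)D3DX_PI / 4.0f,   // vertical field of view
    width / (float)height,   // aspect ratio
    1.0f,                    // near plane (placeholder)
    1000.0f);                // far plane (placeholder)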
DirectX only drawing the last call:
I had to apply the material again for each model.
So, for each model, I now do:
// Set coresponding input layout
this->device->IASetInputLayout(this->inputLayout);
// Apply specified pass of current technique
this->technique->GetPassByIndex(0)->Apply(0);
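For context, here is a minimal sketch of the per-model loop with that fix in place; the models container and its GetVertexBuffer/GetIndexBuffer/GetIndexCount accessors are hypothetical stand-ins for my engine's own wrappers:
// Hypothetical per-model loop: re-bind the layout and re-apply the pass
// before every draw so effect-variable changes (e.g. the per-model world
// matrix) are actually uploaded to the pipeline.
for (Model* model : models)
{
    ID3D10Buffer* vb = model->GetVertexBuffer();
    UINT stride = model->GetStride();
    UINT offset = 0;
    this->device->IASetVertexBuffers(0, 1, &vb, &stride, &offset);
    this->device->IASetIndexBuffer(model->GetIndexBuffer(), DXGI_FORMAT_R32_UINT, 0);
    this->device->IASetPrimitiveTopology(D3D10_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
    this->device->IASetInputLayout(this->inputLayout);  // per draw
    this->technique->GetPassByIndex(0)->Apply(0);       // per draw
    this->device->DrawIndexed(model->GetIndexCount(), 0, 0);
}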

Related

Trying to copy pixel data from CPU to GPU using Map; everything runs fine but the screen comes up empty (DirectX)

In my TextureHendler class, I create an empty texture using this:
D3D11_TEXTURE2D_DESC textureDesc = { 0 };
textureDesc.Width = textureWidth;
textureDesc.Height = textureHeight;
textureDesc.Format = dxgiFormat;
textureDesc.Usage = D3D11_USAGE_DYNAMIC;
textureDesc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
textureDesc.MiscFlags = 0;
textureDesc.MipLevels = 1;
textureDesc.ArraySize = 1;
textureDesc.SampleDesc.Count = 1;
textureDesc.SampleDesc.Quality = 0;
textureDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
hr = m_deviceResource->GetD3DDevice()->CreateTexture2D(
    &textureDesc,
    nullptr,
    &texture);
and at runtime I want to load data from CPU memory using the code below:
Platform::Array<unsigned char>^ datapointer = GetPixelBytes();
static constexpr int BYTES_IN_RGBA_PIXEL = 4;
auto rowspan = BYTES_IN_RGBA_PIXEL * width;
D3D11_MAPPED_SUBRESOURCE ms;
auto device_context = m_deviceResource->GetD3DDeviceContext();
device_context->Map(m_texture->GetTexture2d(), 0, D3D11_MAP_WRITE_DISCARD, 0, &ms);
uint8_t* mappedData = reinterpret_cast<uint8_t*>(ms.pData);
for (int i = 0; i < datapointer->Length; i++)
{
    mappedData[i] = datapointer[i];
}
device_context->Unmap(m_texture->GetTexture2d(), 0);
Everything runs fine, but the output screen comes up black.
Update:
std::shared_ptr<TextureHendler> m_texture;
TextureHendler holds:
public:
    ID3D11Texture2D* GetTexture2d() { return texture.Get(); }
private:
    Microsoft::WRL::ComPtr<ID3D11Texture2D> texture;
and a load function whose content is shown above.
Here is the sample code: https://github.com/AbhishekSharma-SEG/Demo_DXPlayer. Thanks for the help.
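One thing worth checking in code like this (a hedged observation, not a verified diagnosis of the linked repository): rowspan is computed but never used, and mapped texture rows are not guaranteed to be contiguous. D3D11_MAPPED_SUBRESOURCE::RowPitch can be larger than width * 4, in which case the data has to be copied row by row, for example:
// Sketch: copy respecting the driver-reported row pitch. `pixels`,
// `width`, and `height` are assumed tightly packed RGBA source data.
D3D11_MAPPED_SUBRESOURCE ms;
if (SUCCEEDED(context->Map(texture, 0, D3D11_MAP_WRITE_DISCARD, 0, &ms)))
{
    const uint8_t* src = pixels;
    uint8_t* dst = static_cast<uint8_t*>(ms.pData);
    const UINT rowspan = width * 4;   // bytes per tightly packed source row
    for (UINT y = 0; y < height; ++y)
    {
        memcpy(dst, src, rowspan);    // one row at a time
        src += rowspan;               // advance by the source pitch
        dst += ms.RowPitch;           // advance by the driver-aligned pitch
    }
    context->Unmap(texture, 0);
}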

Draw Text with FreeType (DirectX 11)

So I would like to draw text with FreeType (on DirectX 11). The problem is that I don't really understand how to write the my_draw_bitmap function (I am following this tutorial).
I get all the glyphs and then convert them to bitmaps, but I don't see how to convert an FT_Bitmap* to an ID3D11Texture2D* (this would, in theory, allow me to render text).
Here is my code:
FontLoader.cpp
void FontLoader::RenderText(ID3D11Device* p_device, Text* p_text, Math::Vec2 p_position)
{
    const char* text = p_text->GetText();
    FT_Face face = p_text->GetFont()->GetFace();
    FT_GlyphSlot slot = face->glyph;
    Math::Vec2 pen = p_position;
    for (unsigned int i = 0; i < strlen(text); ++i)
    {
        // Render the glyph for this character into slot->bitmap
        if (FT_Load_Char(face, text[i], FT_LOAD_RENDER))
            continue;
        // draw to our target surface
        CreateTextureFromBitmap(p_device, &slot->bitmap, Math::Vec2((float)slot->bitmap_left, (float)slot->bitmap_top));
        // Increment pen position (advance is in 1/64ths of a pixel)
        pen._x += slot->advance.x >> 6;
    }
}
ID3D11Texture2D* FontLoader::CreateTextureFromBitmap(ID3D11Device* p_device, FT_Bitmap* p_bitmap, Math::Vec2 p_position)
{
    D3D11_TEXTURE2D_DESC textureDesc;
    textureDesc.Width = p_bitmap->width;
    textureDesc.Height = p_bitmap->pitch;
    textureDesc.MipLevels = textureDesc.ArraySize = 1;
    textureDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
    textureDesc.SampleDesc.Count = 1;
    textureDesc.Usage = D3D11_USAGE_DYNAMIC;
    textureDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
    textureDesc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
    textureDesc.MiscFlags = 0;
    ID3D11Texture2D* texture2D = NULL;
    // don't know how and when to use p_bitmap
    p_device->CreateTexture2D(&textureDesc, NULL, &texture2D);
    return texture2D;
}
Thanks a lot!
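A hedged sketch of one way to do the conversion, assuming FT_LOAD_RENDER produced an 8-bit grayscale bitmap (FT_PIXEL_MODE_GRAY) with a positive pitch: create a single-channel DXGI_FORMAT_R8_UNORM texture sized by rows (pitch is bytes per row, not a height) and pass buffer as the initial data:
// Sketch: wrap an 8-bit FreeType glyph bitmap in an immutable R8 texture;
// the pixel shader then reads glyph coverage from the red channel.
ID3D11Texture2D* CreateGlyphTexture(ID3D11Device* p_device, const FT_Bitmap* p_bitmap)
{
    D3D11_TEXTURE2D_DESC desc = {};
    desc.Width = p_bitmap->width;
    desc.Height = p_bitmap->rows;        // rows, not pitch
    desc.MipLevels = 1;
    desc.ArraySize = 1;
    desc.Format = DXGI_FORMAT_R8_UNORM;  // one coverage byte per texel
    desc.SampleDesc.Count = 1;
    desc.Usage = D3D11_USAGE_IMMUTABLE;
    desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;

    D3D11_SUBRESOURCE_DATA init = {};
    init.pSysMem = p_bitmap->buffer;     // glyph coverage data
    init.SysMemPitch = p_bitmap->pitch;  // FreeType's bytes per row

    ID3D11Texture2D* texture = nullptr;
    p_device->CreateTexture2D(&desc, &init, &texture);
    return texture;
}
In practice it is usually better to pack all glyphs into one atlas texture up front rather than creating a texture per glyph on every RenderText call.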

DirectX 10 swap chain scaling

I want to disable swap chain scaling on Windows.
I'm rendering a 1600x900 quad on top of a 1366x768 window, but it's stretched.
Desc_.BufferCount = 1;
Desc_.OutputWindow = windowWnd;
Desc_.Windowed = true;
Desc_.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
Desc_.BufferDesc.Height = height;
Desc_.BufferDesc.Width = width;
Desc_.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
Desc_.SampleDesc.Count = 1;
Desc_.SampleDesc.Quality = 0;
Desc_.BufferDesc.RefreshRate.Numerator = 60;
Desc_.BufferDesc.RefreshRate.Denominator = 1;
Desc_.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_PROGRESSIVE;
Desc_.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;
Desc_.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
Desc_.Flags = 0;
This is the initialization of the DXGI swap chain.
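A note on why this happens (hedged, since the question shows only the initialization): DXGI_MODE_SCALING only affects fullscreen mode switches. In windowed mode, Present always stretches the back buffer to the window's client area, so a 1600x900 back buffer presented into a 1366x768 window is scaled down regardless of the Scaling field. The usual D3D10-era workaround is to keep the back buffer the same size as the client rectangle and resize it with the window; variable names here are illustrative:
// Sketch: size the back buffer to the client area so windowed Present
// has nothing to stretch; repeat on WM_SIZE (releasing any render
// target views of the back buffer before the resize).
RECT rc;
GetClientRect(windowWnd, &rc);
UINT clientWidth  = rc.right - rc.left;
UINT clientHeight = rc.bottom - rc.top;
swapChain->ResizeBuffers(1, clientWidth, clientHeight, DXGI_FORMAT_R8G8B8A8_UNORM, 0);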

Failed to create depth stencil in DirectX11

I have the following code:
D3D11_TEXTURE2D_DESC descDepth;
memset(&descDepth, 0, sizeof(descDepth));
descDepth.Width = width;
descDepth.Height = height;
descDepth.MipLevels = 1;
descDepth.ArraySize = 1;
descDepth.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
descDepth.SampleDesc.Count = 1;
descDepth.SampleDesc.Quality = 0;
descDepth.Usage = D3D11_USAGE_DEFAULT;
descDepth.BindFlags = D3D11_BIND_DEPTH_STENCIL;
descDepth.CPUAccessFlags = 0;
descDepth.MiscFlags = 0;
hr = g_d3dDevice->CreateTexture2D(&descDepth, nullptr, &g_depthStencil);
and I get an invalid argument error (E_INVALIDARG). I really don't know what the problem is. I used this code before and everything worked fine.
If you turned the debug layer on at device creation, you should find in the log that DXGI_FORMAT_D24_UNORM_S8_UINT is rejected as the texture format here. You need a typeless format (DXGI_FORMAT_R24G8_TYPELESS) for the texture, and the depth format only for the depth stencil view.
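A hedged sketch of the typeless approach, which is required when the depth buffer must also be sampled as a shader resource; the g_depthStencilView and g_shaderResourceView names are illustrative:
// Sketch: typeless storage, with the views resolving the typelessness.
D3D11_TEXTURE2D_DESC descDepth = {};
descDepth.Width = width;
descDepth.Height = height;
descDepth.MipLevels = 1;
descDepth.ArraySize = 1;
descDepth.Format = DXGI_FORMAT_R24G8_TYPELESS;   // typeless storage
descDepth.SampleDesc.Count = 1;
descDepth.Usage = D3D11_USAGE_DEFAULT;
descDepth.BindFlags = D3D11_BIND_DEPTH_STENCIL | D3D11_BIND_SHADER_RESOURCE;

ID3D11Texture2D* depthTex = nullptr;
hr = g_d3dDevice->CreateTexture2D(&descDepth, nullptr, &depthTex);

// The depth stencil view interprets the data as depth + stencil
D3D11_DEPTH_STENCIL_VIEW_DESC dsvDesc = {};
dsvDesc.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
dsvDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
hr = g_d3dDevice->CreateDepthStencilView(depthTex, &dsvDesc, &g_depthStencilView);

// The shader resource view reads the 24-bit depth portion
D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc = {};
srvDesc.Format = DXGI_FORMAT_R24_UNORM_X8_TYPELESS;
srvDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
srvDesc.Texture2D.MipLevels = 1;
hr = g_d3dDevice->CreateShaderResourceView(depthTex, &srvDesc, &g_shaderResourceView);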

I am trying to set a chess board texture for a square

I am trying to set a chess board texture with green and blue texels for a square.
Instead, my square is black.
I don't get any compile or runtime errors.
Is there any problem with my texture code?
vector<unsigned char> texelsBuffer;
for (int i = 0; i < N; ++i)
    for (int j = 0; j < N; ++j)
    {
        texelsBuffer.push_back(0.0f);
        if ((i + j) % 2 == 0)
        {
            texelsBuffer.push_back(1.0f);
            texelsBuffer.push_back(0.0f);
        }
        else
        {
            texelsBuffer.push_back(0.0f);
            texelsBuffer.push_back(1.0f);
        }
        texelsBuffer.push_back(1.0f);
    }
ID3D11Texture2D* tex = 0;
D3D11_TEXTURE2D_DESC textureDescr;
ZeroMemory(&textureDescr, sizeof(D3D11_TEXTURE2D_DESC));
textureDescr.ArraySize = 1;
textureDescr.BindFlags = D3D11_BIND_SHADER_RESOURCE;
textureDescr.CPUAccessFlags = 0;
textureDescr.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
textureDescr.Height = N;
textureDescr.Width = N;
textureDescr.MipLevels = 1;
textureDescr.MiscFlags = 0;
textureDescr.SampleDesc.Count = 1;
textureDescr.SampleDesc.Quality = 0;
textureDescr.Usage = D3D11_USAGE_IMMUTABLE;
D3D11_SUBRESOURCE_DATA texInitData;
ZeroMemory(&texInitData, sizeof(D3D11_SUBRESOURCE_DATA));
texInitData.pSysMem = (void*)&texelsBuffer[0];
texInitData.SysMemPitch = sizeof(unsigned char) * N;
texInitData.SysMemSlicePitch = 0;
g_pd3dDevice->CreateTexture2D(&textureDescr, &texInitData, &tex);
g_pd3dDevice->CreateShaderResourceView(tex, NULL, &g_pTextureRV);
Or my sampler code?
D3D11_SAMPLER_DESC sampDesc;
ZeroMemory( &sampDesc, sizeof(sampDesc) );
sampDesc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
sampDesc.AddressU = D3D11_TEXTURE_ADDRESS_WRAP;
sampDesc.AddressV = D3D11_TEXTURE_ADDRESS_WRAP;
sampDesc.AddressW = D3D11_TEXTURE_ADDRESS_WRAP;
sampDesc.ComparisonFunc = D3D11_COMPARISON_NEVER;
sampDesc.MinLOD = 0;
sampDesc.MaxLOD = D3D11_FLOAT32_MAX;
g_pd3dDevice->CreateSamplerState(&sampDesc, &g_pSamplerLinear);
You're populating texelsBuffer with floating-point values, but DXGI_FORMAT_R8G8B8A8_UNORM expects bytes; pushing 1.0f stores the byte value 1, which is 1/255 in UNORM encoding, so the texture comes out nearly black. Use 255 (0xFF) to set a channel to 1.0.
You should also consider making your chess squares larger than one texel; unless you always render the texture at a 1:1 pixel-to-texel ratio, most sample locations will blend between the two colors.
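A hedged sketch of the corrected fill; note it also sets SysMemPitch to the full row size in bytes (N texels x 4 channels), since sizeof(unsigned char) * N only covers a quarter of each row:
// Sketch: byte-valued checkerboard texels for an R8G8B8A8_UNORM texture.
std::vector<unsigned char> texelsBuffer;
for (int i = 0; i < N; ++i)
    for (int j = 0; j < N; ++j)
    {
        bool green = ((i + j) % 2 == 0);
        texelsBuffer.push_back(0);                // R
        texelsBuffer.push_back(green ? 255 : 0);  // G
        texelsBuffer.push_back(green ? 0 : 255);  // B
        texelsBuffer.push_back(255);              // A
    }

D3D11_SUBRESOURCE_DATA texInitData = {};
texInitData.pSysMem = texelsBuffer.data();
texInitData.SysMemPitch = N * 4;                  // bytes per row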