Need Help Understanding Invalid Argument Error When Creating Vertex Shader - c++

I'm just starting out in DirectX 11, and I've run into a problem. I'm loading an extremely simple shader from an HLSL file:
struct VOut
{
    float4 position : SV_POSITION;
    float4 color : COLOR;
};

VOut VS_Main( float4 pos : POSITION, float4 color : COLOR )
{
    VOut output;
    output.position = pos;
    output.color = color;
    return output;
}

float4 PS_Main( float4 pos : SV_POSITION, float4 color : COLOR ) : SV_TARGET
{
    return color;
}
Using the following code (which DOES NOT trigger the if(FAILED(result)) block):
bool PathfindingDemoBase::CompileD3DShader(char* filePath, char* entry, char* shaderModel, ID3DBlob** buffer){
    DWORD shaderFlags = D3DCOMPILE_ENABLE_STRICTNESS;

#if defined(DEBUG) || defined(_DEBUG)
    shaderFlags |= D3DCOMPILE_DEBUG;
#endif

    ID3DBlob* errorBuffer = 0;
    HRESULT result;

    result = D3DX11CompileFromFile(filePath, 0, 0, entry, shaderModel,
        0, 0, 0, buffer, &errorBuffer, 0);

    if(FAILED(result)){
        if(errorBuffer != 0){
            OutputDebugStringA((char*)errorBuffer->GetBufferPointer());
            errorBuffer->Release();
        }
        return false;
    }

    if(errorBuffer != 0){
        errorBuffer->Release();
    }

    return true;
}
What does happen is that in the LoadContent function below, the CreateVertexShader call returns an "E_INVALIDARG" result (in d3dResult), even though the vsBuffer-> calls aren't returning null :/
bool AStarDemo::LoadContent(){
    ID3D10Blob* vsBuffer = 0;

    bool compileResult = CompileD3DShader("ReturnColor.hlsl", "VS_Main", "vs_5_0", &vsBuffer);
    if(compileResult == false){
        MessageBox(0, "Error loading vertex shader!", "Compile Error", MB_OK);
        return false;
    }

    HRESULT d3dResult;
    d3dResult = d3dDevice->CreateVertexShader(vsBuffer->GetBufferPointer(),
        vsBuffer->GetBufferSize(), 0, &returnColorVS);

    //actually set the vertex shader
    d3dContext->VSSetShader(returnColorVS, 0, 0);

    if(FAILED(d3dResult)){
        if(vsBuffer){
            vsBuffer->Release();
        }
        return false;
    }

    D3D11_INPUT_ELEMENT_DESC solidColorLayout[] ={
        { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 }
    };
    unsigned int totalLayoutElements = ARRAYSIZE(solidColorLayout);

    d3dResult = d3dDevice->CreateInputLayout(solidColorLayout, totalLayoutElements,
        vsBuffer->GetBufferPointer(), vsBuffer->GetBufferSize(), &inputLayout);
    vsBuffer->Release();

    if(FAILED(d3dResult)){
        return false;
    }

    ID3D10Blob* psBuffer = 0;
    compileResult = CompileD3DShader("ReturnColor.hlsl", "PS_Main", "ps_5_0", &psBuffer);

    //actually set the pixel shader
    d3dContext->PSSetShader(returnColorPS, 0, 0);

    if(compileResult == false){
        MessageBox(0, "Error loading pixel shader!", "Compile Error", MB_OK);
        return false;
    }

    d3dResult = d3dDevice->CreatePixelShader(psBuffer->GetBufferPointer(),
        psBuffer->GetBufferSize(), 0, &returnColorPS);
    psBuffer->Release();

    if(FAILED(d3dResult)){
        return false;
    }

    Vertex vertices[4 * TOTAL_NUMBER_GRID_SQUARES];
    gridSquares = new GridSquare[TOTAL_NUMBER_GRID_SQUARES];

    //for each grid square
    for(int i = 0; i < TOTAL_NUMBER_GRID_SQUARES; i++){
        XMFLOAT4 color = ((i%2) == 0) ? GREEN : RED;

        //create a grid square object
        GridSquare gs;
        gs.TopLeft     = Vertex( XMFLOAT3( 0.5f * i,  0.5f * i, Z_LEVEL), color);
        gs.TopRight    = Vertex( XMFLOAT3( 0.5f * i, -0.5f * i, Z_LEVEL), color);
        gs.BottomRight = Vertex( XMFLOAT3(-0.5f * i, -0.5f * i, Z_LEVEL), color);
        gs.BottomLeft  = Vertex( XMFLOAT3(-0.5f * i,  0.5f * i, Z_LEVEL), color);
        gs.type = ((i%2) == 0) ? prey : hunter;

        gridSquares[i] = gs;

        vertices[4*i]     = gs.TopLeft;
        vertices[(4*i)+1] = gs.TopRight;
        vertices[(4*i)+2] = gs.BottomRight;
        vertices[(4*i)+3] = gs.BottomLeft;
    }

    D3D11_BUFFER_DESC vertexDesc;
    ZeroMemory(&vertexDesc, sizeof(vertexDesc));
    vertexDesc.Usage = D3D11_USAGE_DYNAMIC;
    vertexDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
    vertexDesc.ByteWidth = 4 * TOTAL_NUMBER_GRID_SQUARES * sizeof(Vertex);
    vertexDesc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;

    D3D11_SUBRESOURCE_DATA resourceData;
    ZeroMemory(&resourceData, sizeof(resourceData));
    resourceData.pSysMem = vertices;

    d3dResult = d3dDevice->CreateBuffer(&vertexDesc, &resourceData, &vertexBuffer);
    if(FAILED(d3dResult)){
        return false;
    }

    return true;
}
Any advice would be greatly appreciated! :)

Found the answer after digging around a bit: apparently there are some problems with using the "ps_5_0" and "vs_5_0" profiles, even though my machine runs DirectX 11.
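For anyone hitting the same E_INVALIDARG: a common cause is that the Direct3D device was created at a feature level below 11_0, in which case shader model 5.0 bytecode is rejected by CreateVertexShader even on a machine with DirectX 11 installed. Here is a minimal sketch (assuming a valid ID3D11Device* such as the d3dDevice used above; PickVSProfile is a hypothetical helper, not part of the original demo) that picks a vertex shader profile from the device's actual feature level:

// Sketch: choose the vertex shader profile from the device's feature level (requires <d3d11.h>).
const char* PickVSProfile(ID3D11Device* device)
{
    switch (device->GetFeatureLevel())
    {
    case D3D_FEATURE_LEVEL_11_0: return "vs_5_0";           // shader model 5.0 needs feature level 11_0+
    case D3D_FEATURE_LEVEL_10_1: return "vs_4_1";
    case D3D_FEATURE_LEVEL_10_0: return "vs_4_0";
    default:                     return "vs_4_0_level_9_3"; // 9_x feature levels
    }
}

Passing the selected profile into CompileD3DShader (cast to char* to match its signature) keeps the compiled bytecode and the device in agreement.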

Related

Can't get the cube to pop up (DirectX 11). Cube is not showing, only the screen is being cleared

I am trying to make a simple D3D11 program to rotate a cube, but ultimately the cube doesn't seem to appear.
Only the screen is being cleared to blue; the cube does not show up.
I have been using this as my source: https://learn.microsoft.com/en-us/windows/win32/direct3dgetstarted/getting-started-with-a-directx-game
The structure of my project is:
MainClass.cpp, MainClass.h (handles window initialization)
DeviceResources.cpp, DeviceResources.h (the device resources include device, context, etc.)
Renderer.cpp, Renderer.h (the renderer loads geometry and shaders; most probably this is where I am going wrong)
Here is my Renderer.h :
#pragma once
#include <memory>
#include <Windows.h>
#include <DirectXMath.h>
#include "DeviceResources.h"
class Renderer
{
public:
Renderer(std::shared_ptr<DeviceResources> deviceResources);
~Renderer();
void CreateDeviceDependentResources();
void CreateWindowSizeDependentResources();
void Update();
void Render();
private:
HRESULT CreateShaders();
HRESULT CreateCube();
void CreateViewAndPerspective();
bool m_done = false;
std::shared_ptr<DeviceResources> m_deviceResources;
typedef struct _constantBufferStruct {
DirectX::XMFLOAT4X4 world;
DirectX::XMFLOAT4X4 view;
DirectX::XMFLOAT4X4 projection;
} ConstantBufferStruct;
static_assert((sizeof(ConstantBufferStruct) % 16) == 0, "Constant Buffer size must be 16-byte aligned");
typedef struct _vertexPositionColor {
DirectX::XMFLOAT3 pos;
DirectX::XMFLOAT3 color;
} VertexPositionColor;
typedef struct _vertexPositionColorTangent {
DirectX::XMFLOAT3 pos;
DirectX::XMFLOAT3 color;
DirectX::XMFLOAT3 tangent;
} VertexPositionColorTangent;
ConstantBufferStruct m_constantBufferData;
unsigned int m_indexCount;
unsigned int m_frameCount;
Microsoft::WRL::ComPtr<ID3D11Buffer> m_pVertexBuffer;
Microsoft::WRL::ComPtr<ID3D11Buffer> m_pIndexBuffer;
Microsoft::WRL::ComPtr<ID3D11VertexShader> m_pVertexShader;
Microsoft::WRL::ComPtr<ID3D11PixelShader> m_pPixelShader;
Microsoft::WRL::ComPtr<ID3D11InputLayout> m_pInputLayout;
Microsoft::WRL::ComPtr<ID3D11InputLayout> m_pInputLayoutExtended;
Microsoft::WRL::ComPtr<ID3D11Buffer> m_pConstantBuffer;
};
And here is my Renderer.cpp code :
#include "Renderer.h"
#include <fstream>
#include <ppltasks.h>
#include <stdexcept>
Renderer::Renderer(std::shared_ptr<DeviceResources> deviceResources) :
m_deviceResources(deviceResources),
m_frameCount(0)
{
m_frameCount = 0;
}
Renderer::~Renderer() {}
void Renderer::CreateDeviceDependentResources() {
auto CreateShaderTask = Concurrency::create_task([this]() {CreateShaders();
m_done = true;
});
auto CreateCubeTask = CreateShaderTask.then([this]() {CreateCube(); });
}
void Renderer::CreateWindowSizeDependentResources() {
CreateViewAndPerspective();
}
HRESULT Renderer::CreateShaders() {
HRESULT hr = S_OK;
ID3D11Device* device = m_deviceResources->GetDevice();
FILE* vShader, * pShader;
BYTE* bytes;
size_t destSize = 4096;
size_t bytesRead = 0;
bytes = new BYTE[destSize];
fopen_s(&vShader, "cubeVertexShader.cso", "rb");
bytesRead = fread_s(bytes, destSize, 1, 4096, vShader);
hr = device->CreateVertexShader(
bytes,
bytesRead,
nullptr,
&m_pVertexShader
);
D3D11_INPUT_ELEMENT_DESC iaDesc[] =
{
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT,
0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "COLOR", 0, DXGI_FORMAT_R32G32B32_FLOAT,
0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 },
};
hr = device->CreateInputLayout(iaDesc,ARRAYSIZE(iaDesc), bytes, bytesRead, &m_pInputLayout);
delete bytes;
bytes = new BYTE[destSize];
bytesRead = 0;
fopen_s(&pShader, "cubePixelShader.cso", "rb");
bytesRead = fread_s(bytes, destSize, 1, 4096, pShader);
hr = device->CreatePixelShader(bytes, bytesRead, nullptr, &m_pPixelShader);
CD3D11_BUFFER_DESC cbDesc(sizeof(ConstantBufferStruct), D3D11_BIND_CONSTANT_BUFFER );
hr = device->CreateBuffer(&cbDesc, nullptr, &m_pConstantBuffer);
if (FAILED(hr))
throw std::exception("Failed to Create Constant Buffer");
fclose(vShader);
fclose(pShader);
return hr;
}
HRESULT Renderer::CreateCube() {
HRESULT hr = S_OK;
ID3D11Device* device = m_deviceResources->GetDevice();
VertexPositionColor CubeVertices[] =
{
{DirectX::XMFLOAT3(-0.5f,-0.5f,-0.5f), DirectX::XMFLOAT3(0, 0, 0),},
{DirectX::XMFLOAT3(-0.5f,-0.5f, 0.5f), DirectX::XMFLOAT3(0, 0, 1),},
{DirectX::XMFLOAT3(-0.5f, 0.5f,-0.5f), DirectX::XMFLOAT3(0, 1, 0),},
{DirectX::XMFLOAT3(-0.5f, 0.5f, 0.5f), DirectX::XMFLOAT3(0, 1, 1),},
{DirectX::XMFLOAT3(0.5f,-0.5f,-0.5f), DirectX::XMFLOAT3(1, 0, 0),},
{DirectX::XMFLOAT3(0.5f,-0.5f, 0.5f), DirectX::XMFLOAT3(1, 0, 1),},
{DirectX::XMFLOAT3(0.5f, 0.5f,-0.5f), DirectX::XMFLOAT3(1, 1, 0),},
{DirectX::XMFLOAT3(0.5f, 0.5f, 0.5f), DirectX::XMFLOAT3(1, 1, 1),},
};
unsigned short CubeIndices[] =
{
0,2,1, // -x
1,2,3,
4,5,6, // +x
5,7,6,
0,1,5, // -y
0,5,4,
2,6,7, // +y
2,7,3,
0,4,6, // -z
0,6,2,
1,3,7, // +z
1,7,5,
};
m_indexCount = ARRAYSIZE(CubeIndices);
CD3D11_BUFFER_DESC vbDesc(sizeof(CubeVertices), D3D11_BIND_VERTEX_BUFFER);
D3D11_SUBRESOURCE_DATA vData;
ZeroMemory(&vData, sizeof(D3D11_SUBRESOURCE_DATA));
vData.pSysMem = CubeVertices;
vData.SysMemPitch = 0;
vData.SysMemSlicePitch = 0;
hr = device->CreateBuffer(&vbDesc, &vData, &m_pVertexBuffer);
CD3D11_BUFFER_DESC ibDesc(sizeof(CubeIndices), D3D11_BIND_INDEX_BUFFER);
D3D11_SUBRESOURCE_DATA iData;
ZeroMemory(&iData, sizeof(D3D11_SUBRESOURCE_DATA));
iData.pSysMem = CubeIndices;
iData.SysMemPitch = 0;
iData.SysMemSlicePitch = 0;
hr = device->CreateBuffer(&ibDesc, &iData, &m_pIndexBuffer);
return hr;
}
void Renderer::CreateViewAndPerspective() {
// Use DirectXMath to create view and perspective matrices.
DirectX::XMVECTOR eye = DirectX::XMVectorSet(0.0f, 0.7f, 1.5f, 0.f);
DirectX::XMVECTOR at = DirectX::XMVectorSet(0.0f, -0.1f, 0.0f, 0.f);
DirectX::XMVECTOR up = DirectX::XMVectorSet(0.0f, 1.0f, 0.0f, 0.f);
DirectX::XMStoreFloat4x4(
&m_constantBufferData.view,
DirectX::XMMatrixTranspose(
DirectX::XMMatrixLookAtRH(
eye,
at,
up
)
)
);
float aspectRatio = m_deviceResources->GetAspectRatio();
DirectX::XMStoreFloat4x4(
&m_constantBufferData.projection,
DirectX::XMMatrixTranspose(
DirectX::XMMatrixPerspectiveFovRH(
DirectX::XMConvertToRadians(70),
aspectRatio,
0.01f,
100.0f
)
)
);
}
void Renderer::Update() {
DirectX::XMStoreFloat4x4(
&m_constantBufferData.world,
DirectX::XMMatrixTranspose(
DirectX::XMMatrixRotationY(
DirectX::XMConvertToRadians(
(float)m_frameCount++
)
)
)
);
if (m_frameCount == MAXUINT) m_frameCount = 0;
}
void Renderer::Render() {
while (!m_done) {
}
ID3D11DeviceContext* context = m_deviceResources->GetDeviceContext();
ID3D11RenderTargetView* renderTarget = m_deviceResources->GetRenderTarget();
ID3D11DepthStencilView* depthStencil = m_deviceResources->GetDepthStencil();
context->UpdateSubresource(m_pConstantBuffer.Get(), 0, nullptr, &m_constantBufferData, 0, 0);
const float blue[4] = { 0.2f, 0.3f, 0.8f, 1.0f};
context->ClearRenderTargetView(renderTarget, blue);
context->ClearDepthStencilView(depthStencil, D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0f, 0);
context->OMSetRenderTargets(1, &renderTarget, depthStencil);
UINT stride = sizeof(VertexPositionColor);
UINT offset = 0;
context->IASetVertexBuffers(
0,
1,
m_pVertexBuffer.GetAddressOf(),
&stride,
&offset
);
context->IASetIndexBuffer(m_pIndexBuffer.Get(), DXGI_FORMAT_R16_UINT, 0);
context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
context->IASetInputLayout(m_pInputLayout.Get());
context->VSSetShader(m_pVertexShader.Get(), nullptr, 0);
context->VSSetConstantBuffers(0, 1, m_pConstantBuffer.GetAddressOf());
context->PSSetShader(m_pPixelShader.Get(), nullptr, 0);
//context->DrawIndexed(m_indexCount, 0, 0);
context->Draw(3, 0);
}
For reference, here are the shaders:
Vertex Shader
cbuffer ModelViewProjectionBuffer : register(b0)
{
    matrix model;
    matrix view;
    matrix projection;
};

float4 main(float3 Position : POSITION, float3 Color : COLOR) : SV_POSITION // main is the default function name
{
    float4 pos = float4(Position, 1.0f);

    // Transform the position from object space to homogeneous projection space
    pos = mul(pos, model);
    pos = mul(pos, view);
    pos = mul(pos, projection);

    return pos;
}
Pixel Shader:
float4 main(float4 position : SV_POSITION) : SV_TARGET
{
    return float4(0.9f, 0.4f, 0.2f, 1.0f);
}
Image of the output
Please help me analyze where I am going wrong.
I am using Visual Studio 2019.
Edit: Some were asking for a complete reproduction, so here is the Visual Studio solution:
https://drive.google.com/file/d/1jt6fQgbRElpc9AYpbhYOyp-HCQL3WmEF/view?usp=sharing
You're doing invalid memory accesses when reading the vertex and pixel shader files.
Also, on my computer, both compiled shaders are a dozen kilobytes each, whilst the memory buffer you're reading them into has only 4096 bytes.
To fix your problem, increase the size of the memory buffer you're using to read the shader bytecode from disk, and don't forget to update the element count in fread_s().
e.g.,
Renderer.cpp # 35
size_t destSize = 1024 * 1024; // CHANGE THE SIZE OF YOUR MEMORY BUFFER HERE
size_t bytesRead = 0;
bytes = new BYTE[destSize];
fopen_s(&vShader, "cubeVertexShader.cso", "rb");
bytesRead = fread_s(bytes, destSize * sizeof(BYTE), sizeof(BYTE), destSize, vShader);
hr = device->CreateVertexShader(
bytes,
bytesRead,
nullptr,
&m_pVertexShader
);
Do the same where you're reading the pixel shader.
PRO TIP: Pay attention to the debug output and you'll see important messages from the D3D debug layer, like the one telling you that both shaders failed to be created :)
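A more defensive alternative (just a sketch, not the poster's code) is to size the read buffer from the file itself, so the bytecode can never be truncated no matter how large the compiled shader grows:

// Sketch: read a whole compiled shader object (.cso) into a buffer sized by the file.
// "cubeVertexShader.cso" is the same file name used in CreateShaders above.
#include <fstream>
#include <stdexcept>
#include <vector>

std::vector<char> LoadShaderBytecode(const char* path)
{
    std::ifstream file(path, std::ios::binary | std::ios::ate); // open at the end to learn the size
    if (!file)
        throw std::runtime_error("failed to open shader file");

    std::streamsize size = file.tellg();
    file.seekg(0, std::ios::beg);

    std::vector<char> bytecode(static_cast<size_t>(size));
    file.read(bytecode.data(), size);
    return bytecode;
}

// Hypothetical usage:
// auto vsBytes = LoadShaderBytecode("cubeVertexShader.cso");
// hr = device->CreateVertexShader(vsBytes.data(), vsBytes.size(), nullptr, &m_pVertexShader);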

FreeType generates multiple textures

I'm trying to use FreeType under DirectX 11 in order to draw some text on my screen. For the moment I'm able to convert an FT_Bitmap* to an ID3D11Texture2D*, then I create an ID3D11ShaderResourceView*.
I then use a shader to draw this shader resource view. The problem is that this code only draws the last letter of the text, 4 times, and I don't really see why.
First, here is how I render a UI element:
Graph.cpp :
void Graph::DrawSprite(Object* p_object)
{
/*...*/
// Problem here
Text* text = nullptr;
text = p_object->GetComponent<Text>();
if (text)
_graphicAPI->Draw(text->GetBuffer());
/*...*/
}
GraphicAPI :
void GraphicAPI::Draw(const TextBuffer& p_text)
{
_shaders._orthoWindow->Render(_deviceContext);
_shaders._sprite->Render(_deviceContext, _shaders._orthoWindow->GetIndexCount(),
_managers._font->GetTextureView(p_text._font), p_text._matrix, _camera2D);
}
SpriteShader.cpp :
bool SpriteShader::Render(ID3D11DeviceContext* p_deviceContext, unsigned int p_indexCount, ID3D11ShaderResourceView* p_texView,
const Math::Mat4& p_matrix, const CameraBuffer& p_camera)
{
bool result = SetShaderParameters(p_deviceContext, p_texView, p_matrix, p_camera._view, p_camera._projection);
if (!result)
return false;
RenderShader(p_deviceContext, p_indexCount);
return true;
}
bool SpriteShader::SetShaderParameters(ID3D11DeviceContext* p_deviceContext, ID3D11ShaderResourceView* p_texView,
const Math::Mat4& p_worldMatrix, const Math::Mat4& p_viewMatrix, const Math::Mat4& p_projectionMatrix)
{
HRESULT result;
D3D11_MAPPED_SUBRESOURCE mappedResource;
unsigned int bufferNumber;
MatrixBufferType* dataMatrixPtr;
result = p_deviceContext->Map(_matrixBuffer, 0, D3D11_MAP_WRITE_DISCARD, 0, &mappedResource);
if (FAILED(result))
return false;
dataMatrixPtr = (MatrixBufferType*)mappedResource.pData;
dataMatrixPtr->world = p_worldMatrix;
dataMatrixPtr->view = p_viewMatrix;
dataMatrixPtr->projection = p_projectionMatrix;
p_deviceContext->Unmap(_matrixBuffer, 0);
bufferNumber = 0;
p_deviceContext->VSSetConstantBuffers(bufferNumber, 1, &_matrixBuffer);
p_deviceContext->PSSetShaderResources(0, 1, &p_texView);
return true;
}
void SpriteShader::RenderShader(ID3D11DeviceContext* p_deviceContext, unsigned int p_indexCount)
{
p_deviceContext->IASetInputLayout(_layout);
p_deviceContext->VSSetShader(_vertexShader, NULL, 0);
p_deviceContext->PSSetShader(_pixelShader, NULL, 0);
p_deviceContext->PSSetSamplers(0, 1, &_sampleState);
p_deviceContext->DrawIndexed(p_indexCount, 0, 0);
}
Here is how I load a TTF font and convert it to an ID3D11ShaderResourceView*.
FontManager.cpp :
bool FontManager::LoadFont(ID3D11Device* p_device, const std::string p_extension, const char* p_fileName)
{
if (p_extension == ".ttf")
{
Font* font = new Font();
if (!font)
return false;
if (!font->LoadTTF(p_device, _library, p_fileName))
{
delete font;
return false;
}
// sets text (tmp)
font->RenderFont(p_device, "ASDASD", Math::Vec2(100, 100));
_fonts.push_back(font);
return true;
}
return false;
}
For the Font class, I based my implementation on this tutorial; CreateShaderResourceView() plays the role of my_draw_bitmap().
Font.cpp :
bool Font::LoadTTF(ID3D11Device* p_device, FT_Library p_library, const char* p_fileName)
{
_fileName = p_fileName;
if (FT_New_Face(p_library, p_fileName, 0, &_face))
return false;
if (FT_Set_Pixel_Sizes(_face, 0, DEFAULT_FONT_SIZE))
return false;
return true;
}
void Font::RenderFont(ID3D11Device* p_device, const char* p_text, Math::Vec2 p_position)
{
FT_GlyphSlot slot = _face->glyph;
FT_Vector pen;
pen.x = p_position._x;
pen.y = p_position._y;
FT_Matrix matrix;
float angle = (/*90*/0.0 / 360) * 3.14159 * 2;
matrix.xx = (FT_Fixed)(cos(angle) * 0x10000L);
matrix.xy = (FT_Fixed)(-sin(angle) * 0x10000L);
matrix.yx = (FT_Fixed)(sin(angle) * 0x10000L);
matrix.yy = (FT_Fixed)(cos(angle) * 0x10000L);
for (unsigned int i = 0; i < strlen(p_text); ++i)
{
FT_Set_Transform(_face, &matrix, &pen);
if (FT_Load_Char(_face, p_text[i], FT_LOAD_RENDER))
continue;
if (!CreateShaderResourceView(p_device, &slot->bitmap))
return;
// Increment pen position
pen.x += slot->advance.x >> 6;
}
}
bool Font::CreateShaderResourceView(ID3D11Device* p_device, FT_Bitmap* p_bitmap)
{
D3D11_TEXTURE2D_DESC textureDesc;
ZeroMemory(&textureDesc, sizeof(textureDesc));
textureDesc.Width = p_bitmap->width;
textureDesc.Height = p_bitmap->rows;
textureDesc.MipLevels = 1;
textureDesc.ArraySize = 1;
textureDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
textureDesc.SampleDesc.Count = 1;
textureDesc.SampleDesc.Quality = 0;
textureDesc.Usage = D3D11_USAGE_DEFAULT;
textureDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
textureDesc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
textureDesc.MiscFlags = 0;
ID3D11Texture2D* texture2D;
ZeroMemory(&texture2D, sizeof(texture2D));
D3D11_SUBRESOURCE_DATA resourceData;
resourceData.pSysMem = p_bitmap->buffer;
resourceData.SysMemPitch = p_bitmap->pitch;
resourceData.SysMemSlicePitch = 0;
HRESULT res = p_device->CreateTexture2D(&textureDesc, &resourceData, &texture2D);
if (FAILED(res))
return false;
D3D11_SHADER_RESOURCE_VIEW_DESC shaderResourceViewDesc;
shaderResourceViewDesc.Format = textureDesc.Format;
shaderResourceViewDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
shaderResourceViewDesc.Texture2D.MostDetailedMip = 0;
shaderResourceViewDesc.Texture2D.MipLevels = 1;
HRESULT result = p_device->CreateShaderResourceView(texture2D, &shaderResourceViewDesc, &_shaderResourceView);
if (FAILED(result))
return false;
return true;
}
And the shaders:
Sprite.ps (I use this shader to draw UI elements):
Texture2D spriteTexture;
SamplerState SampleType;

struct PixelInputType
{
    float4 position : SV_POSITION;
    float2 tex : TEXCOORD0;
};

float4 SpritePixelShader(PixelInputType input) : SV_TARGET
{
    float4 textureColor;
    textureColor = spriteTexture.Sample(SampleType, input.tex);
    return textureColor;
}
Sprite.vs :
cbuffer MatrixBuffer
{
    matrix worldMatrix;
    matrix viewMatrix;
    matrix projectionMatrix;
};

struct VertexInputType
{
    float4 position : POSITION;
    float2 tex : TEXCOORD0;
};

struct PixelInputType
{
    float4 position : SV_POSITION;
    float2 tex : TEXCOORD0;
};

PixelInputType SpriteVertexShader(VertexInputType input)
{
    PixelInputType output;

    input.position.w = 1.0f;

    output.position = mul(input.position, worldMatrix);
    output.position = mul(output.position, viewMatrix);
    output.position = mul(output.position, projectionMatrix);

    output.tex = input.tex;

    return output;
}
Here is what I get:
This guy (here) had the same issue, but the solution doesn't work for me. Does anyone know how to solve this? I've been stuck on it for a week now...
It seems that inside for (unsigned int i = 0; i < strlen(p_text); ++i) you render the text letter by letter and recreate the shader resource view on each iteration, so at the end you are left with a shader resource view that contains only the last letter's texture.
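One way to act on that (a rough sketch, assuming Font is changed to keep a container of views rather than the single _shaderResourceView member, and that CreateShaderResourceView is given a hypothetical overload that returns the view it creates) is to store one view per glyph and draw them all later:

// Sketch only: collect one shader resource view per glyph instead of
// overwriting a single member each iteration.
// _glyphViews and the CreateShaderResourceView overload are hypothetical.
std::vector<ID3D11ShaderResourceView*> _glyphViews;

void Font::RenderFont(ID3D11Device* p_device, const char* p_text, Math::Vec2 p_position)
{
    FT_GlyphSlot slot = _face->glyph;
    FT_Vector pen;
    pen.x = (FT_Pos)p_position._x;
    pen.y = (FT_Pos)p_position._y;

    for (unsigned int i = 0; i < strlen(p_text); ++i)
    {
        FT_Set_Transform(_face, nullptr, &pen);
        if (FT_Load_Char(_face, p_text[i], FT_LOAD_RENDER))
            continue;

        ID3D11ShaderResourceView* view = nullptr;
        if (CreateShaderResourceView(p_device, &slot->bitmap, &view)) // hypothetical overload that outputs the view
            _glyphViews.push_back(view);                              // one texture per letter, drawn later

        pen.x += slot->advance.x >> 6;
    }
}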

C++ DirectX11 Texture On Terrain Not Rendering Properly

I am developing a game engine using the Rastertek tutorials.
My problem is that the terrain texture isn't loading properly.
Pixel Shader:
Texture2D shaderTexture;
SamplerState SampleType;

cbuffer LightBuffer
{
    float4 ambientColor;
    float4 diffuseColor;
    float3 lightDirection;
    float padding;
};

//////////////
// TYPEDEFS //
//////////////
struct PixelInputType
{
    float4 position : SV_POSITION;
    float2 tex : TEXCOORD0;
    float3 normal : NORMAL;
};

////////////////////////////////////////////////////////////////////////////////
// Pixel Shader
////////////////////////////////////////////////////////////////////////////////
float4 TerrainPixelShader(PixelInputType input) : SV_TARGET
{
    float4 textureColor;
    float3 lightDir;
    float lightIntensity;
    float4 color;

    // Sample the pixel color from the texture using the sampler at this texture coordinate location.
    textureColor = shaderTexture.Sample(SampleType, input.tex);

    // Set the default output color to the ambient light value for all pixels.
    color = ambientColor;

    // Invert the light direction for calculations.
    lightDir = -lightDirection;

    // Calculate the amount of light on this pixel.
    lightIntensity = saturate(dot(input.normal, lightDir));

    if(lightIntensity > 0.0f)
    {
        // Determine the final diffuse color based on the diffuse color and the amount of light intensity.
        color += (diffuseColor * lightIntensity);
    }

    // Saturate the final light color.
    color = saturate(color);

    // Multiply the texture pixel and the final light color to get the result.
    color = color * textureColor;

    return color;
}
Vertex Shader:
cbuffer MatrixBuffer
{
    matrix worldMatrix;
    matrix viewMatrix;
    matrix projectionMatrix;
};

//////////////
// TYPEDEFS //
//////////////
struct VertexInputType
{
    float4 position : POSITION;
    float2 tex : TEXCOORD0;
    float3 normal : NORMAL;
};

struct PixelInputType
{
    float4 position : SV_POSITION;
    float2 tex : TEXCOORD0;
    float3 normal : NORMAL;
};

////////////////////////////////////////////////////////////////////////////////
// Vertex Shader
////////////////////////////////////////////////////////////////////////////////
PixelInputType TerrainVertexShader(VertexInputType input)
{
    PixelInputType output;

    // Change the position vector to be 4 units for proper matrix calculations.
    input.position.w = 1.0f;

    // Calculate the position of the vertex against the world, view, and projection matrices.
    output.position = mul(input.position, worldMatrix);
    output.position = mul(output.position, viewMatrix);
    output.position = mul(output.position, projectionMatrix);

    // Store the texture coordinates for the pixel shader.
    output.tex = input.tex;

    // Calculate the normal vector against the world matrix only.
    output.normal = mul(input.normal, (float3x3)worldMatrix);

    // Normalize the normal vector.
    output.normal = normalize(output.normal);

    return output;
}
Terrain Shader Class:
bool TerrainShaderClass::SetShaderParameters(ID3D11DeviceContext* deviceContext, D3DXMATRIX world, D3DXMATRIX view,
D3DXMATRIX projection, D3DXVECTOR4 ambientColor, D3DXVECTOR4 diffuseColor, D3DXVECTOR3 lightDirection,
ID3D11ShaderResourceView* texture)
{
HRESULT result;
D3D11_MAPPED_SUBRESOURCE mappedResource;
unsigned int bufferNumber;
MatrixBufferType* matrixData;
LightBufferType* lightData;
D3DXMatrixTranspose(&world, &world);
D3DXMatrixTranspose(&view, &view);
D3DXMatrixTranspose(&projection, &projection);
result = deviceContext->Map(m_matrixBuffer, 0, D3D11_MAP_WRITE_DISCARD, 0, &mappedResource);
if (FAILED(result))
{
return false;
}
matrixData = (MatrixBufferType*)mappedResource.pData;
matrixData->world = world;
matrixData->view = view;
matrixData->projection = projection;
deviceContext->Unmap(m_matrixBuffer, 0);
bufferNumber = 0;
deviceContext->VSSetConstantBuffers(bufferNumber, 1, &m_matrixBuffer);
deviceContext->Map(m_lightBuffer, 0, D3D11_MAP_WRITE_DISCARD, 0, &mappedResource);
lightData = (LightBufferType*)mappedResource.pData;
lightData->ambientColor = ambientColor;
lightData->diffuseColor = diffuseColor;
lightData->lightDirection = lightDirection;
lightData->padding = 0.0f;
deviceContext->Unmap(m_lightBuffer, 0);
bufferNumber = 0;
deviceContext->PSSetConstantBuffers(bufferNumber, 1, &m_lightBuffer);
deviceContext->PSSetShaderResources(0, 1, &texture);
return true;
}
void TerrainShaderClass::OutputShaderErrorMessage(ID3D10Blob* errorMessage, HWND hwnd, LPCSTR shaderFileName)
{
char* compileErrors = (char*)(errorMessage->GetBufferPointer());
unsigned long bufferSize = errorMessage->GetBufferSize();
ofstream fout;
fout.open("shader-error.txt");
for (unsigned long i = 0; i < bufferSize; i++)
{
fout << compileErrors[i];
}
fout.close();
errorMessage->Release();
errorMessage = nullptr;
MessageBox(hwnd, "Error compiling shader. Check shader-error.txt for message.", shaderFileName, MB_OK);
}
void TerrainShaderClass::RenderShader(ID3D11DeviceContext* deviceContext, int indexCount)
{
deviceContext->IASetInputLayout(m_layout);
deviceContext->VSSetShader(m_vertexShader, NULL, 0);
deviceContext->PSSetShader(m_pixelShader, NULL, 0);
deviceContext->PSSetSamplers(0, 1, &m_samplerState);
deviceContext->DrawIndexed(indexCount, 0, 0);
}
bool TerrainShaderClass::InitializeShader(ID3D11Device* device, HWND hwnd, LPCSTR vsFileName, LPCSTR psFileName)
{
HRESULT result;
ID3D10Blob* errorMessage = nullptr;
ID3D10Blob* vertexShaderBuffer = nullptr;
ID3D10Blob* pixelShaderBuffer = nullptr;
D3D11_INPUT_ELEMENT_DESC polygonLayout[3];
unsigned int numElements;
D3D11_SAMPLER_DESC samplerDesc;
D3D11_BUFFER_DESC matrixBufferDesc;
D3D11_BUFFER_DESC lightBufferDesc;
result = D3DX11CompileFromFile(vsFileName, NULL, NULL, "TerrainVertexShader", "vs_5_0", D3D10_SHADER_ENABLE_STRICTNESS,
0, NULL, &vertexShaderBuffer, &errorMessage, NULL);
if (FAILED(result))
{
if (errorMessage)
{
OutputShaderErrorMessage(errorMessage, hwnd, vsFileName);
}
else
{
MessageBox(hwnd, "Missing Shader File", vsFileName, MB_OK);
}
return false;
}
result = D3DX11CompileFromFile(psFileName, NULL, NULL, "TerrainPixelShader", "ps_5_0", D3D10_SHADER_ENABLE_STRICTNESS,
0, NULL, &pixelShaderBuffer, &errorMessage, NULL);
if (FAILED(result))
{
if (errorMessage)
{
OutputShaderErrorMessage(errorMessage, hwnd, psFileName);
}
else
{
MessageBox(hwnd, "Missing Shader File", psFileName, MB_OK);
}
return false;
}
result = device->CreateVertexShader(vertexShaderBuffer->GetBufferPointer(), vertexShaderBuffer->GetBufferSize(), NULL, &m_vertexShader);
if (FAILED(result))
{
return false;
}
result = device->CreatePixelShader(pixelShaderBuffer->GetBufferPointer(), pixelShaderBuffer->GetBufferSize(), NULL, &m_pixelShader);
if (FAILED(result))
{
return false;
}
polygonLayout[0].SemanticName = "POSITION";
polygonLayout[0].SemanticIndex = 0;
polygonLayout[0].Format = DXGI_FORMAT_R32G32B32_FLOAT;
polygonLayout[0].InputSlot = 0;
polygonLayout[0].AlignedByteOffset = 0;
polygonLayout[0].InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
polygonLayout[0].InstanceDataStepRate = 0;
polygonLayout[1].SemanticName = "TEXCOORD";
polygonLayout[1].SemanticIndex = 0;
polygonLayout[1].Format = DXGI_FORMAT_R32G32_FLOAT;
polygonLayout[1].InputSlot = 0;
polygonLayout[1].AlignedByteOffset = D3D11_APPEND_ALIGNED_ELEMENT;
polygonLayout[1].InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
polygonLayout[1].InstanceDataStepRate = 0;
polygonLayout[2].SemanticName = "NORMAL";
polygonLayout[2].SemanticIndex = 0;
polygonLayout[2].Format = DXGI_FORMAT_R32G32B32_FLOAT;
polygonLayout[2].InputSlot = 0;
polygonLayout[2].AlignedByteOffset = D3D11_APPEND_ALIGNED_ELEMENT;
polygonLayout[2].InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
polygonLayout[2].InstanceDataStepRate = 0;
numElements = sizeof(polygonLayout) / sizeof(polygonLayout[0]);
result = device->CreateInputLayout(polygonLayout, numElements, vertexShaderBuffer->GetBufferPointer(), vertexShaderBuffer->GetBufferSize(), &m_layout);
if (FAILED(result))
{
return false;
}
vertexShaderBuffer->Release();
vertexShaderBuffer = nullptr;
pixelShaderBuffer->Release();
pixelShaderBuffer = nullptr;
samplerDesc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
samplerDesc.AddressU = D3D11_TEXTURE_ADDRESS_WRAP;
samplerDesc.AddressV = D3D11_TEXTURE_ADDRESS_WRAP;
samplerDesc.AddressW = D3D11_TEXTURE_ADDRESS_WRAP;
samplerDesc.MipLODBias = 0.0f;
samplerDesc.MaxAnisotropy = 1;
samplerDesc.ComparisonFunc = D3D11_COMPARISON_ALWAYS;
samplerDesc.BorderColor[0] = 0;
samplerDesc.BorderColor[1] = 0;
samplerDesc.BorderColor[2] = 0;
samplerDesc.BorderColor[3] = 0;
samplerDesc.MinLOD = 0;
samplerDesc.MaxLOD = D3D11_FLOAT32_MAX;
result = device->CreateSamplerState(&samplerDesc, &m_samplerState);
if (FAILED(result))
{
return false;
}
matrixBufferDesc.Usage = D3D11_USAGE_DYNAMIC;
matrixBufferDesc.ByteWidth = sizeof(MatrixBufferType);
matrixBufferDesc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
matrixBufferDesc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
matrixBufferDesc.MiscFlags = 0;
matrixBufferDesc.StructureByteStride = 0;
result = device->CreateBuffer(&matrixBufferDesc, NULL, &m_matrixBuffer);
if (FAILED(result))
{
return false;
}
//ByteWidth must be a multiple of 16 if using D3D11_BIND_CONSTANT_BUFFER or CreateBuffer will fail.
lightBufferDesc.Usage = D3D11_USAGE_DYNAMIC;
lightBufferDesc.ByteWidth = sizeof(LightBufferType);
lightBufferDesc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
lightBufferDesc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
lightBufferDesc.MiscFlags = 0;
lightBufferDesc.StructureByteStride = 0;
device->CreateBuffer(&lightBufferDesc, NULL, &m_lightBuffer);
if (FAILED(result))
{
return false;
}
return true;
}
The texture is supposed to look like the one displayed in the link, but instead it looks really weird (I can't seem to take a screenshot; I will add one if possible).
I tried looking into other questions here, but none solved the issue.
I am still fairly new to DX11, so any help is much appreciated.
Edit: Here is a screenshot (left side: how it is supposed to look, right side: my game)
I'm looking at your screenshot, and not only is your texture not rendering correctly, but your normals aren't either; otherwise you would at least have diffuse lighting shading it properly. I would surmise that, although your stride is correct, what you are pulling out of the buffer for UV and normal is not aligned properly. Those are my first thoughts.
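If it is an alignment problem, one quick way to rule it out (a sketch, under the assumption that the CPU-side vertex is position/texcoord/normal packed in that order) is to spell out the byte offsets explicitly instead of relying on D3D11_APPEND_ALIGNED_ELEMENT, and to check that they add up to the stride passed to IASetVertexBuffers:

// Sketch: input layout with explicit byte offsets.
// TerrainVertex is a hypothetical mirror of the engine's vertex type.
#include <d3d11.h>

struct TerrainVertex
{
    float position[3];  // bytes  0..11
    float tex[2];       // bytes 12..19
    float normal[3];    // bytes 20..31
};
static_assert(sizeof(TerrainVertex) == 32, "stride must match the offsets below");

const D3D11_INPUT_ELEMENT_DESC polygonLayout[] =
{
    { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0,  0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT,    0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "NORMAL",   0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 20, D3D11_INPUT_PER_VERTEX_DATA, 0 },
};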

DirectX does not draw anything

Hello, I recently tried to learn DirectX 11, but my program does not draw anything.
The only thing I get is the window with the background color I have chosen.
I have divided my program into a library (the engine) and a regular project.
The library contains a model class, a shader class and a DirectX init function.
S3DData is just a struct containing all relevant classes, e.g. the swap chain, etc.
static bool initDX(logfile* errorlog, S3DData *data){
D3D_FEATURE_LEVEL featureLevels[] = {
D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1,
D3D_FEATURE_LEVEL_10_0
};
UINT numFeatureLevels = 3;
D3D_FEATURE_LEVEL featureLevel = D3D_FEATURE_LEVEL_11_0;
HRESULT result = ERROR_SUCCESS;
DXGI_MODE_DESC bufferDesc;
ZeroMemory(&bufferDesc, sizeof(DXGI_MODE_DESC));
//swapchain and device
bufferDesc.Height = data->WindowHeight;
bufferDesc.Width = data->WindowWidth;
bufferDesc.RefreshRate.Denominator = 1;
bufferDesc.RefreshRate.Numerator = 60;
bufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;
bufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
bufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
DXGI_SWAP_CHAIN_DESC swapChainDesc;
ZeroMemory(&swapChainDesc, sizeof(DXGI_SWAP_CHAIN_DESC));
swapChainDesc.BufferDesc = bufferDesc;
swapChainDesc.OutputWindow = data->Handle;
swapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
swapChainDesc.Windowed = data->Windowed;
swapChainDesc.BufferCount = 1;
swapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
swapChainDesc.SampleDesc.Quality = 0;
swapChainDesc.SampleDesc.Count = 1;
result = D3D11CreateDeviceAndSwapChain(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, NULL, NULL, NULL,
D3D11_SDK_VERSION, &swapChainDesc, &data->SwapChain, &data->Device, NULL, &data->DeviceContext);
if(FAILED(result)){
std::string error;
errorlog->write("failed to create swapchain or device:");
if(result == E_INVALIDARG)
error = "invalid argument";
else if(result == E_OUTOFMEMORY)
error = " no memory";
else if(result == DXGI_ERROR_MORE_DATA)
error = " more data needed for buffer";
else if(result == E_NOTIMPL)
error = " not implemented";
else if(result == DXGI_ERROR_INVALID_CALL)
error = " invalid call";
else
error = std::to_string((unsigned int)result);
errorlog->write(error);
return false;
}
//back buffer and rendertargetview
ID3D11Texture2D *backbuffer;
result = data->SwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (void**)&backbuffer);
if(FAILED(result)){
errorlog->write("failed to get backbuffer");
return false;
}
result = data->Device->CreateRenderTargetView(backbuffer, NULL, &data->RenderTargetView);
if(FAILED(result)){
errorlog->write("failed to create render target view");
return false;
}
data->DeviceContext->OMSetRenderTargets(1, &data->RenderTargetView, nullptr);
backbuffer->Release();
ZeroMemory(&data->viewport, sizeof(D3D11_VIEWPORT));
data->viewport.Height = data->WindowHeight;
data->viewport.Width = data->WindowWidth;
data->viewport.TopLeftX = 0;
data->viewport.TopLeftY = 0;
data->DeviceContext->RSSetViewports(1, &data->viewport);
errorlog->write("directx success");
return true;
}
The function basically creates the device, swap chain and device context, and sets the render target and the viewport.
The second function is the shader init function:
bool shader::init(std::string vsFile, std::string psFile, S3DData * data){
std::ofstream output;
output.open("shaderErrorLog.txt", std::ios::binary);
_S3DData = data;
_pixelShader = nullptr;
_vertexShader = nullptr;
_layout = nullptr;
HRESULT result;
ID3D10Blob *errorMsg, *pixelShader, *vertexShader;;
unsigned int numElements;
errorMsg = 0;
pixelShader = 0;
vertexShader = 0;
result = D3DX11CompileFromFile(vsFile.c_str(), 0, 0, "VS", "vs_5_0", 0, 0, 0, &vertexShader, &errorMsg, 0);
if(FAILED(result)){
if(errorMsg != nullptr){
char *compilerErrors = (char*)errorMsg->GetBufferPointer();
unsigned int size = errorMsg->GetBufferSize();
output.write(compilerErrors, size);
}
else
{
std::string error ="failed to find file";
output.write(error.c_str(), error.size());
}
return false;
}
result = D3DX11CompileFromFile(psFile.c_str(), 0, 0, "PS", "ps_5_0", 0, 0, 0, &pixelShader, &errorMsg, 0);
if(FAILED(result)){
if(errorMsg){
char *compilerErrors = (char*)errorMsg->GetBufferPointer();
unsigned int size = errorMsg->GetBufferSize();
output.write(compilerErrors, size);
}
else
{
std::string noFileMsg = "file " +psFile +"not found";
output.write(noFileMsg.c_str(), noFileMsg.size());
}
return false;
}
result = _S3DData->Device->CreateVertexShader(vertexShader->GetBufferPointer(), vertexShader->GetBufferSize(), nullptr, &_vertexShader);
if(FAILED(result)){
return false;
}
result = _S3DData->Device->CreatePixelShader(pixelShader->GetBufferPointer(), pixelShader->GetBufferSize(), nullptr, &_pixelShader);
if(FAILED(result)){
return false;
}
//layout of vertex
//in case of color.fx position and color
D3D11_INPUT_ELEMENT_DESC layout[] ={
{"POSITION",0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0}
};
//get num of elements
numElements = 2;
result = _S3DData->Device->CreateInputLayout(layout, numElements, vertexShader->GetBufferPointer(), vertexShader->GetBufferSize(), &_layout);
if(FAILED(result))
return false;
vertexShader->Release();
vertexShader = 0;
pixelShader->Release();
pixelShader = 0;
std::string success = "shader init : success";
output.write(success.c_str() , success.size());
_S3DData->DeviceContext->IASetInputLayout(_layout);
_S3DData->DeviceContext->VSSetShader(_vertexShader, 0, 0);
_S3DData->DeviceContext->PSSetShader(_pixelShader, 0, 0);
return true;
}
And these are the members of the shader class:
ID3D11VertexShader *_vertexShader;
ID3D11PixelShader *_pixelShader;
ID3D11InputLayout *_layout;
S3DData *_S3DData;
This function creates the shaders, and since I only have one shader for now, it also sets the shaders and the input layout.
The last function is the model init function:
bool model::init(S3DData *data){
_S3DData = data;
HRESULT result;
vertex *vertexBuffer;
unsigned long* indexBuffer;
D3D11_BUFFER_DESC indexDesc, vertexDesc;
D3D11_SUBRESOURCE_DATA indexData, vertexData;
//create buffers
_vertexCount = 3;
_indexCount = 3;
vertexBuffer = new vertex[_vertexCount];
if(!vertexBuffer)return false;
indexBuffer = new unsigned long[_indexCount];
if(!indexBuffer)return false;
//fill buffers
vertexBuffer[0] = vertex( 0.0f, 1.0f, 1.0f);
vertexBuffer[0] = vertex( 1.0f, -1.0f, 1.0f);
vertexBuffer[0] = vertex( -1.0f, -1.0f, 1.0f);
indexBuffer[0] = 0;
indexBuffer[1] = 1;
indexBuffer[2] = 2;
//bufferDesc
vertexDesc.Usage = D3D11_USAGE_DEFAULT;
vertexDesc.ByteWidth = sizeof(vertex) * _vertexCount;
vertexDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
vertexDesc.CPUAccessFlags = 0;
vertexDesc.MiscFlags = 0;
vertexDesc.StructureByteStride = 0;
//set subressource data
vertexData.pSysMem = vertexBuffer;
vertexData.SysMemPitch = 0;
vertexData.SysMemSlicePitch = 0;
result = _S3DData->Device->CreateBuffer(&vertexDesc, &vertexData, &_vertex);
if(FAILED(result))return false;
indexDesc.ByteWidth = sizeof(unsigned long) * _indexCount;
indexDesc.Usage = D3D11_USAGE_DEFAULT;
indexDesc.BindFlags = D3D11_BIND_INDEX_BUFFER;
indexDesc.MiscFlags = 0;
indexDesc.CPUAccessFlags = 0;
indexDesc.StructureByteStride = 0;
//set subressource
indexData.pSysMem = indexBuffer;
indexData.SysMemPitch = 0;
indexData.SysMemSlicePitch = 0;
result = _S3DData->Device->CreateBuffer(&indexDesc, &indexData, &_index);
if(FAILED(result))return false;
delete []indexBuffer;
indexBuffer = nullptr;
delete []vertexBuffer;
vertexBuffer = nullptr;
return true;
}
The vertex struct:
struct vertex{
    XMFLOAT3 pos;

    vertex(){}
    vertex(float x, float y, float z) : pos(x, y, z){
    }
};
So this function only creates the buffers.
In the render function the remaining variables are set:
void model::render(shader *Shader){
unsigned int stride = sizeof(vertex);
unsigned int offset = 0;
_S3DData->DeviceContext->IASetVertexBuffers(0, 1, &_vertex, &stride, &offset);
_S3DData->DeviceContext->IASetIndexBuffer(_index, DXGI_FORMAT_R32_UINT, 0);
//set form of vertex: triangles
_S3DData->DeviceContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
_S3DData->DeviceContext->DrawIndexed(_indexCount, 0, 0);
}
EDIT:
Here are the shaders you requested.
Vertex Shader:
struct VSout{
    float4 position : SV_POSITION;
};

VSout VS(float4 position : POSITION){
    VSout output;
    output.position = position;
    return output;
}
Pixel Shader:
float4 PS() : SV_TARGET{
    float4 newColor = float4(1.0f, 1.0f, 0.0f, 1.0f);
    return newColor;
}
Here is a screenshot of the debugger: on the left you have all the draw calls etc., and in the middle you can see the vertex buffer.
debugger
Thanks for your help in advance.
Looking at the debugger image you posted, the 2nd and 3rd vertices are all 0. This means you didn't fill your vertex buffer properly.
Looking at your code, when you fill your vertex buffer, you're only setting index 0. So your code looks like this:
vertexBuffer[0] = vertex( 0.0f, 1.0f, 1.0f);
vertexBuffer[0] = vertex( 1.0f, -1.0f, 1.0f);
vertexBuffer[0] = vertex( -1.0f, -1.0f, 1.0f);
And it should look like this:
vertexBuffer[0] = vertex( 0.0f, 1.0f, 1.0f);
vertexBuffer[1] = vertex( 1.0f, -1.0f, 1.0f);
vertexBuffer[2] = vertex( -1.0f, -1.0f, 1.0f);
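A slightly safer variant (just a sketch reusing the vertex struct from the question) is to build the triangle with an aggregate initializer so an index can't be repeated by accident:

// Sketch: aggregate-initialize the triangle so no index can be repeated by mistake.
// Uses the vertex struct from the question; _vertexCount would then be ARRAYSIZE(triangle).
vertex triangle[] = {
    vertex( 0.0f,  1.0f, 1.0f),
    vertex( 1.0f, -1.0f, 1.0f),
    vertex(-1.0f, -1.0f, 1.0f),
};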

HLSL Vertex Shader gets wrong input

I am currently working on a project, but my vertex shader gets the wrong data, so my position values are no longer the same as the ones I set at the beginning.
This is where I define the position/anchor of my sprite:
struct SpriteVertex
{
DirectX::XMFLOAT3 position;
float radius;
int textureIndex;
};
//Sprite renderer
vector<SpriteVertex> sprite_vertices;
SpriteVertex current;
current.position.x = 0;
current.position.y = 0;
current.position.z = 0;
current.radius = 100;
current.textureIndex = 0;
sprite_vertices.push_back(current);
g_SpriteRenderer->renderSprites(pd3dImmediateContext, sprite_vertices, g_camera);
In my SpriteRenderer class I have the create method, where I set up the input layout and create an empty vertex buffer.
HRESULT SpriteRenderer::create(ID3D11Device* pDevice)
{
cout << "Spriterender Create has been called" << endl;
HRESULT hr;
D3D11_BUFFER_DESC bd;
ZeroMemory(&bd, sizeof(bd));
bd.Usage = D3D11_USAGE_DEFAULT;
bd.ByteWidth = 1024 * sizeof(SpriteVertex);
bd.BindFlags = D3D11_BIND_SHADER_RESOURCE;
bd.CPUAccessFlags = 0;
bd.MiscFlags = 0;
V(pDevice->CreateBuffer(&bd , nullptr, &m_pVertexBuffer));
const D3D11_INPUT_ELEMENT_DESC layout[] ={
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "RADIUS", 0, DXGI_FORMAT_R32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEXTUREINDEX",0,DXGI_FORMAT_R32_SINT,0,D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 }
};
UINT numEements = sizeof(layout) / sizeof(layout[0]);
D3DX11_PASS_DESC pd;
V_RETURN(m_pEffect->GetTechniqueByName("Render")->GetPassByName("SpritePass")->GetDesc(&pd));
V_RETURN(pDevice->CreateInputLayout(layout, numEements, pd.pIAInputSignature, pd.IAInputSignatureSize, &m_pInputLayout));
return S_OK;
}
And I have the render method which fills the buffer and is supposed to render it with the shader I coded:
void SpriteRenderer::renderSprites(ID3D11DeviceContext* context, const std::vector<SpriteVertex>& sprites, const CFirstPersonCamera& camera)
{
//cout << "SpriterenderrenderSprites has been called" << endl;
D3D11_BOX box;
box.left = 0; box.right = sprites.size()*sizeof(SpriteVertex);
box.top = 0; box.bottom = 1;
box.front = 0; box.back = 1;
context->UpdateSubresource(m_pVertexBuffer,0,&box,&sprites[0],0,0);
const UINT size = sizeof(SpriteVertex);
context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_POINTLIST);
context->IASetInputLayout(m_pInputLayout);
context->IASetVertexBuffers(0, 0, &m_pVertexBuffer, &size, nullptr);
//setting shader resouirces
DirectX::XMMATRIX worldviewProj =camera.GetViewMatrix()*camera.GetProjMatrix();
m_pEffect->GetVariableByName("g_ViewProjection")->AsMatrix()->SetMatrix(( float* ) &worldviewProj);
m_pEffect->GetVariableByName("g_cameraRight")->AsVector()->SetFloatVector((float*) &camera.GetWorldRight());
m_pEffect->GetVariableByName("g_cameraUP")->AsVector()->SetFloatVector((float*)&camera.GetWorldUp());
m_pEffect->GetTechniqueByName("Render")->GetPassByName("SpritePass")->Apply( 0,context);
context->Draw(size,0);
}
My big problem is that when I debug the shaders, my initial position, radius and so on aren't even close to what I want:
Debug VS
I've been trying to fix this forever; any help would be really appreciated.
EDIT: the HLSL code might help ;=)
//--------------------------------------------------------------------------------------
// Shader resources
//--------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------
// Constant buffers
//--------------------------------------------------------------------------------------
cbuffer cbCOnstant
{
matrix g_ViewProjection;
float4 g_cameraRight;
float4 g_cameraUP;
};
//--------------------------------------------------------------------------------------
// Structs
//--------------------------------------------------------------------------------------
struct SpriteVertex
{
float3 POSITION : POSITION;
float RADIUS: RADIUS;
int TEXIN : TEXTUREINDEX;
};
struct PSVertex
{
float4 POSITION : SV_Position;
int TEXIN : TEXTUREINDEX;
};
//--------------------------------------------------------------------------------------
// Rasterizer states
//--------------------------------------------------------------------------------------
RasterizerState rsCullNone
{
CullMode = None;
};
//--------------------------------------------------------------------------------------
// DepthStates
//--------------------------------------------------------------------------------------
DepthStencilState EnableDepth
{
DepthEnable = TRUE;
DepthWriteMask = ALL;
DepthFunc = LESS_EQUAL;
};
BlendState NoBlending
{
AlphaToCoverageEnable = FALSE;
BlendEnable[0] = FALSE;
};
//--------------------------------------------------------------------------------------
// Shaders
//--------------------------------------------------------------------------------------
SpriteVertex DummyVS(SpriteVertex Input)
{
return Input;
}
[maxvertexcount(4)]
void SpriteGS(point SpriteVertex vertex[1], inout TriangleStream<PSVertex> stream){
PSVertex input;
input.TEXIN = vertex[0].TEXIN;
//bottom left
input.POSITION = mul(float4(vertex[0].POSITION,1) - vertex[0].RADIUS * g_cameraRight - vertex[0].RADIUS * g_cameraUP, g_ViewProjection);
stream.Append(input);
//top left
input.POSITION = mul(float4(vertex[0].POSITION,1) - vertex[0].RADIUS * g_cameraRight + vertex[0].RADIUS * g_cameraUP, g_ViewProjection);
stream.Append(input);
//top right
input.POSITION = mul(float4(vertex[0].POSITION,1) + vertex[0].RADIUS * g_cameraRight + vertex[0].RADIUS * g_cameraUP, g_ViewProjection);
stream.Append(input);
//bot right
input.POSITION = mul(float4(vertex[0].POSITION,1) + vertex[0].RADIUS * g_cameraRight - vertex[0].RADIUS * g_cameraUP, g_ViewProjection);
stream.Append(input);
}
float4 DummyPS(PSVertex input) : SV_Target0
{
return float4(1, 1, 0, 1);
}
//--------------------------------------------------------------------------------------
// Techniques
//--------------------------------------------------------------------------------------
technique11 Render
{
pass SpritePass
{
SetVertexShader(CompileShader(vs_5_0, DummyVS()));
SetGeometryShader(CompileShader(gs_5_0, SpriteGS()));
SetPixelShader(CompileShader(ps_5_0, DummyPS()));
SetRasterizerState(rsCullNone);
SetDepthStencilState(EnableDepth,0);
SetBlendState(NoBlending, float4(0.0f, 0.0f, 0.0f, 0.0f), 0xFFFFFFFF);
}
}
Your binding of the vertex buffer below is wrong:
context->IASetVertexBuffers(0, 0, &m_pVertexBuffer, &size, nullptr);
The second argument of ID3D11DeviceContext::IASetVertexBuffers is the number of buffers to bind; it has to be one here, and the offsets pointer has to be valid:
UINT offset = 0;
context->IASetVertexBuffers(0, 1, &m_pVertexBuffer, &size, &offset);
As general advice, you should turn on the debug device at creation with the D3D11_CREATE_DEVICE_DEBUG flag and look for any messages in the output. On Windows 10, you may have to install the debug layer first, following Microsoft's instructions: installing the debug device
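For reference, here is a minimal sketch of what that looks like at device creation (assuming the same D3D11CreateDeviceAndSwapChain pattern used in the questions above, with swapChainDesc, swapChain, device and deviceContext declared elsewhere):

// Sketch: create the device with the debug layer enabled in debug builds,
// so mistakes like the IASetVertexBuffers binding above show up in the output window.
UINT createFlags = 0;
#if defined(_DEBUG)
createFlags |= D3D11_CREATE_DEVICE_DEBUG;   // requires the SDK debug layers to be installed
#endif

HRESULT hr = D3D11CreateDeviceAndSwapChain(
    nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr,
    createFlags,                            // device creation flags
    nullptr, 0,                             // default feature levels
    D3D11_SDK_VERSION,
    &swapChainDesc,
    &swapChain, &device, nullptr, &deviceContext);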