DX11 Losing Instance Buffer Data - C++

I've got a function that creates a number of instance buffers into an array for me to use in my DrawIndexedInstanced calls.
But when I pass the vertex buffer and instance buffer through to my shader, the instance data is completely lost by the time the shader uses it, so none of my objects are relocated and they all render in the same place.
I've been looking at this for hours and can't find anything helpful.
Creating the Vertex shader input layout:
D3D11_INPUT_ELEMENT_DESC solidColorLayout[] =
{
//Vertex Buffer
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 },
//Instance buffer
{ "INSTANCEPOS", 0, DXGI_FORMAT_R32G32B32_FLOAT, 1, 0, D3D11_INPUT_PER_INSTANCE_DATA, 1 },
{ "INSTANCEROT", 0, DXGI_FORMAT_R32G32B32_FLOAT, 1, 12, D3D11_INPUT_PER_INSTANCE_DATA, 1 },
{ "INSTANCESCA", 0, DXGI_FORMAT_R32G32B32_FLOAT, 1, 24, D3D11_INPUT_PER_INSTANCE_DATA, 1 },
{ "INSTANCETEX", 0, DXGI_FORMAT_R32_FLOAT, 1, 36, D3D11_INPUT_PER_INSTANCE_DATA, 1 }
};
Creating an instance buffer (called multiple times per frame, to create all necessary buffers):
void GameManager::CreateInstanceBuffer(ID3D11Buffer** buffer, Mesh* mesh, std::vector<Instance> instances)
{
D3D11_BUFFER_DESC instBuffDesc;
ZeroMemory(&instBuffDesc, sizeof(instBuffDesc));
instBuffDesc.Usage = D3D11_USAGE_DEFAULT;
instBuffDesc.ByteWidth = sizeof(Instance) * instances.size();
instBuffDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
instBuffDesc.CPUAccessFlags = 0;
instBuffDesc.MiscFlags = 0;
instBuffDesc.StructureByteStride = 0;
int i = sizeof(Instance);
D3D11_SUBRESOURCE_DATA instData;
ZeroMemory(&instData, sizeof(instData));
instData.pSysMem = &instances;
instData.SysMemPitch = 0;
instData.SysMemSlicePitch = 0;
CheckFailWithError(dxManager.GetDevice()->CreateBuffer(&instBuffDesc, &instData, buffer),
"An error occurred whilst building an instance buffer",
"[GameManager]");
meshBuffers.push_back(mesh->GetBuffer(VERTEX_BUFFER));
}
The draw command:
dxManager.GetContext()->DrawIndexedInstanced(instanceIndexCounts[buffer], instanceCounts[buffer], 0, 0, 0);
The shader:
cbuffer cbChangesEveryFrame : register(b0)
{
matrix worldMatrix;
};
cbuffer cbNeverChanges : register(b1)
{
matrix viewMatrix;
};
cbuffer cbChangeOnResize : register(b2)
{
matrix projMatrix;
};
struct VS_Input
{
float4 pos : POSITION;
float2 tex0 : TEXCOORD0;
float4 instancePos : INSTANCEPOS;
float4 instanceRot : INSTANCEROT;
float4 instanceSca : INSTANCESCA;
float instanceTex : INSTANCETEX;
};
PS_Input VS_Main(VS_Input vertex)
{
PS_Input vsOut = (PS_Input)0;
vsOut.pos = mul(vertex.pos + vertex.instancePos, worldMatrix);
vsOut.pos = mul(vsOut.pos, viewMatrix);
vsOut.pos = mul(vsOut.pos, projMatrix);
vsOut.tex0 = vertex.tex0;
return vsOut;
}
I've used the graphics debugger built into Visual Studio. Initially it appeared to be assigning variables in the vertex shader back to front; removing APPEND_ALIGNED_ELEMENT from the AlignedByteOffset fixed that. The per-instance data, however, still seems to be corrupt and is not being received.
If there is anything else you need let me know and I'll update the post as necessary.

The problem lies in your subresource data.
instData.pSysMem = &instances;
This takes the address of the std::vector object itself, not of the element data it owns, so the buffer gets initialized from the vector's internal bookkeeping instead of your instances. Point it at the first element instead:
instData.pSysMem = &instances[0];
or
instData.pSysMem = &instances.at(0);
That clarifies where your instance data actually starts in memory and will hopefully fix your issue.
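For reference, here is a minimal sketch of the corrected function, assuming the Instance type and GameManager members from the question; instances.data() (C++11) points at the same contiguous storage as &instances[0] but is also safe to call on an empty vector, and taking the vector by const reference avoids copying it on every call:
void GameManager::CreateInstanceBuffer(ID3D11Buffer** buffer, Mesh* mesh, const std::vector<Instance>& instances)
{
    if (instances.empty())
        return; // CreateBuffer rejects a ByteWidth of 0 anyway

    D3D11_BUFFER_DESC instBuffDesc = {};
    instBuffDesc.Usage = D3D11_USAGE_DEFAULT;
    instBuffDesc.ByteWidth = static_cast<UINT>(sizeof(Instance) * instances.size());
    instBuffDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;

    D3D11_SUBRESOURCE_DATA instData = {};
    instData.pSysMem = instances.data(); // the element storage, not the vector object

    CheckFailWithError(dxManager.GetDevice()->CreateBuffer(&instBuffDesc, &instData, buffer),
        "An error occurred whilst building an instance buffer",
        "[GameManager]");
    meshBuffers.push_back(mesh->GetBuffer(VERTEX_BUFFER));
}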

Related

DirectX 11 Tessellation Shader Not Working

I have a problem with my tessellation shader. It renders when I don't use a Hull Shader or Domain Shader; just plain vertex and pixel shaders work fine. Here are my VS and PS shaders:
VOut VShader(float4 position : POSITION, float4 color : COLOR)
{
VOut output;
output.position = mul(world, position);
output.color = color;
return output;
}
float4 PShader(float4 position : SV_POSITION, float4 color : COLOR) : SV_TARGET
{
return color;
}
I am using an Orthographic Projection to map pixels to their original positions. Here is the problematic shader, which takes float3 vertices and float4 colors as input.
cbuffer cbPerFrame : register(b0)
{
matrix world;
};
struct VS_CONTROL_POINT_INPUT
{
float3 vPosition : POSITION;
};
struct VS_CONTROL_POINT_OUTPUT
{
float3 vPosition : POSITION;
};
struct HS_CONSTANT_DATA_OUTPUT
{
float Edges[3] : SV_TessFactor;
float Inside : SV_InsideTessFactor;
};
struct HS_OUTPUT
{
float3 vPosition : POSITION;
};
HS_CONSTANT_DATA_OUTPUT ConstantHS(InputPatch<VS_CONTROL_POINT_OUTPUT, 3> ip, uint PatchID : SV_PrimitiveID)
{
HS_CONSTANT_DATA_OUTPUT Output;
Output.Edges[0] = Output.Edges[1] = Output.Edges[2] = 4;
Output.Inside = 4;
return Output;
}
[domain("tri")]
[partitioning("integer")]
[outputtopology("triangle_cw")]
[outputcontrolpoints(3)]
[patchconstantfunc("ConstantHS")]
HS_OUTPUT HShader(InputPatch<VS_CONTROL_POINT_OUTPUT, 3> p, uint i : SV_OutputControlPointID, uint PatchID : SV_PrimitiveID)
{
HS_OUTPUT Output;
Output.vPosition = p[i].vPosition;
return Output;
}
struct DS_OUTPUT
{
float4 vPosition : SV_POSITION;
};
[domain("tri")]
DS_OUTPUT DShader(HS_CONSTANT_DATA_OUTPUT input, float3 UVW : SV_DomainLocation, const OutputPatch<HS_OUTPUT, 3> quad)
{
DS_OUTPUT Output;
float3 finalPos = UVW.x * quad[0].vPosition + UVW.y * quad[1].vPosition + UVW.z * quad[2].vPosition;
Output.vPosition = mul(world, float4(finalPos, 1));
return Output;
}
VS_CONTROL_POINT_OUTPUT VShader(VS_CONTROL_POINT_INPUT Input)
{
VS_CONTROL_POINT_OUTPUT Output;
Output.vPosition = Input.vPosition;
return Output;
}
float4 PShader(DS_OUTPUT Input) : SV_TARGET
{
return float4(1, 0, 0, 1);
}
My shader init. code:
D3DCompileFromFile(L"shader.hlsl", NULL, NULL, "VShader", "vs_5_0", 0, 0, &VS, &ERR);
D3DCompileFromFile(L"shader.hlsl", NULL, NULL, "PShader", "ps_5_0", 0, 0, &PS, &ERR);
D3DCompileFromFile(L"shader.hlsl", NULL, NULL, "HShader", "hs_5_0", 0, 0, &HS, &ERR);
D3DCompileFromFile(L"shader.hlsl", NULL, NULL, "DShader", "ds_5_0", 0, 0, &DS, &ERR);
dev->CreateVertexShader(VS->GetBufferPointer(), VS->GetBufferSize(), NULL, &pVS);
dev->CreatePixelShader(PS->GetBufferPointer(), PS->GetBufferSize(), NULL, &pPS);
dev->CreateHullShader(HS->GetBufferPointer(), HS->GetBufferSize(), NULL, &pHS);
dev->CreateDomainShader(DS->GetBufferPointer(), DS->GetBufferSize(), NULL, &pDS);
devcon->VSSetShader(pVS, 0, 0);
devcon->HSSetShader(pHS, 0, 0);
devcon->DSSetShader(pDS, 0, 0);
devcon->PSSetShader(pPS, 0, 0);
Input descriptor:
D3D11_INPUT_ELEMENT_DESC ied[] =
{
{"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"COLOR", 0, DXGI_FORMAT_R32G32B32A32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
};
dev->CreateInputLayout(ied, 2, VS->GetBufferPointer(), VS->GetBufferSize(), &pLayout);
devcon->IASetInputLayout(pLayout);
Rasterizer:
D3D11_RASTERIZER_DESC RasterDesc = {};
RasterDesc.FillMode = D3D11_FILL_SOLID;
RasterDesc.CullMode = D3D11_CULL_NONE;
RasterDesc.DepthClipEnable = TRUE;
ID3D11RasterizerState* WireFrame=NULL;
dev->CreateRasterizerState(&RasterDesc, &WireFrame);
devcon->RSSetState(WireFrame);
Input Vertices:
OurVertices = (VERTEX*)malloc(PointCount * sizeof(VERTEX));
for (int i = 0; i < PointCount; i++)
{
OurVertices[i] = { RandOm() * i, RandOm() * i, RandOm(), { abs(RandOm()), abs(RandOm()), abs(RandOm()), 1.0f } };
}
CBuffer:
ID3D11Buffer* g_pConstantBuffer11 = NULL;
cbuff.world = XMMatrixOrthographicOffCenterLH(SceneY - (ViewPortWidth / 2) * SceneZoom, SceneY + (ViewPortWidth / 2) * SceneZoom,
SceneX - (ViewPortHeight / 2) * SceneZoom, SceneX + (ViewPortHeight / 2) * SceneZoom,-10000.0f, 10000.0f);
D3D11_BUFFER_DESC cbDesc;
cbDesc.ByteWidth = sizeof(CBUFFER);
cbDesc.Usage = D3D11_USAGE_DYNAMIC;
cbDesc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
cbDesc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
cbDesc.MiscFlags = 0;
cbDesc.StructureByteStride = 0;
D3D11_SUBRESOURCE_DATA InitData;
InitData.pSysMem = &cbuff;
InitData.SysMemPitch = 0;
InitData.SysMemSlicePitch = 0;
dev->CreateBuffer(&cbDesc, &InitData,&g_pConstantBuffer11);
devcon->VSSetConstantBuffers(0, 1, &g_pConstantBuffer11);
On Render:
devcon->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_3_CONTROL_POINT_PATCHLIST);
All possible errors are handled in the real code; the handling has been removed here for cleanliness, and there are no warnings or exceptions during the Init or Render stages.
I can't even debug it, because nothing is being drawn to the output. Also, is there any way to see the output values from shaders?
cbuffer cbPerFrame : register(b0)
{
matrix world;
};
This cbuffer is no longer used by the vertex shader but by the domain shader; however, it does not seem you are binding it there, so you should have:
devcon->VSSetConstantBuffers(0, 1, &g_pConstantBuffer11); //Vertex
devcon->DSSetConstantBuffers(0, 1, &g_pConstantBuffer11); //Domain
(Please note that you don't normally need to bind it to the vertex stage anymore if you use tessellation only.)
Otherwise the pipeline will read a zero matrix.

Normals are not transferred to DirectX 11 shader correctly - random, time-dependent values?

Today I was trying to add normal maps to my DirectX 11 application.
Something went wrong, so I've decided to output the normals instead of color on the scene objects to "see" where the problem lies.
What surprised me is that the normals' values change very fast (the colors blink each frame). And I'm sure that I don't manipulate their values during program execution (the positions of the vertices stay stable, but the normals do not).
Here are two screens for some frames at t1 and t2:
My vertex structure:
struct MyVertex{//vertex structure
MyVertex() : weightCount(0), normal(0,0,0){
//textureCoordinates.x = 1;
//textureCoordinates.y = 1;
}
MyVertex(float x, float y, float z, float u, float v, float nx, float ny, float nz)
: position(x, y, z), textureCoordinates(u, v), normal(0,0,0), weightCount(0){
}
DirectX::XMFLOAT3 position;
DirectX::XMFLOAT2 textureCoordinates;
DirectX::XMFLOAT3 normal = DirectX::XMFLOAT3(1.0f, 0.0f, 0.0f);
//will not be sent to shader (and used only by skinned models)
int startWeightIndex;
int weightCount; //=0 means that it's not skinned vertex
};
The corresponding vertex layout:
layout[0] = { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[1] = { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[2] = { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 20, D3D11_INPUT_PER_VERTEX_DATA, 0 };
Vertex buffer:
D3D11_BUFFER_DESC bd;
ZeroMemory(&bd, sizeof(bd));
bd.Usage = D3D11_USAGE_DEFAULT; //D3D11_USAGE_DYNAMIC
bd.ByteWidth = sizeof(MyVertex) * structure->getVerticesCount();
bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bd.CPUAccessFlags = 0;
D3D11_SUBRESOURCE_DATA InitData;
ZeroMemory(&InitData, sizeof(InitData));
InitData.pSysMem = structure->vertices;
if(device->CreateBuffer(&bd, &InitData, &buffers->vertexBuffer) != S_OK){
return false;
}
And the shader that output normals "as color" (of course, if I set output.normal to float3(1,1,1), objects stays white):
struct Light
{
float3 diffuse;
float3 position;
float3 direction;
};
cbuffer cbPerObject : register(b0)
{
float4x4 WVP;
float4x4 World;
float4 difColor;
bool hasTexture;
bool hasNormMap;
};
cbuffer cbPerFrame : register(b1)
{
Light light;
};
Texture2D ObjTexture;
Texture2D ObjNormMap;
SamplerState ObjSamplerState;
TextureCube SkyMap;
struct VS_INPUT
{
float4 position : POSITION;
float2 tex : TEXCOORD;
float3 normal : NORMAL;
};
struct VS_OUTPUT
{
float4 Pos : SV_POSITION;
float4 worldPos : POSITION;
float3 normal : NORMAL;
float2 TexCoord : TEXCOORD;
float3 tangent : TANGENT;
};
VS_OUTPUT VS(VS_INPUT input)
{
VS_OUTPUT output;
//input.position.w = 1.0f;
output.Pos = mul(input.position, WVP);
output.worldPos = mul(input.position, World);
output.normal = input.normal;
output.tangent = mul(input.tangent, World);
output.TexCoord = input.tex;
return output;
}
float4 PS(VS_OUTPUT input) : SV_TARGET
{
return float4(input.normal, 1.0);
}
//--------------------------------------------------------------------------------------
// Techniques
//--------------------------------------------------------------------------------------
technique10 RENDER
{
pass P0
{
SetVertexShader( CompileShader( vs_4_0, VS() ) );
SetPixelShader( CompileShader( ps_4_0, PS() ) );
SetBlendState( SrcAlphaBlendingAdd, float4( 0.0f, 0.0f, 0.0f, 0.0f ), 0xFFFFFFFF );
}
}
Where have I made a mistake? Maybe there are other places in the code that could cause that strange behavior (some locking, buffers, dunno...)?
edit:
As 413X suggested, I've run the DirectX Diagnostic:
What is strange is that on the small preview, the screen looks the same as in the program. But when I investigate that frame (screenshot), I get completely different colors:
Also, here's something strange - I pick the blue pixel and it says it's black (on the right):
edit 2:
As catflier requested I post some additional code.
The rendering and buffers binding:
//set the object world matrix
DirectX::XMMATRIX objectWorldMatrix = DirectX::XMMatrixIdentity();
DirectX::XMMATRIX rotationMatrix = DirectX::XMMatrixRotationQuaternion(
DirectX::XMVectorSet(object->getOrientation().getX(), object->getOrientation().getY(), object->getOrientation().getZ(), object->getOrientation().getW())
);
DirectX::XMMATRIX scaleMatrix = (
object->usesScaleMatrix()
? DirectX::XMMatrixScaling(object->getHalfSize().getX(), object->getHalfSize().getY(), object->getHalfSize().getZ())
: DirectX::XMMatrixScaling(1.0f, 1.0f, 1.0f)
);
DirectX::XMMATRIX translationMatrix = DirectX::XMMatrixTranslation(object->getPosition().getX(), object->getPosition().getY(), object->getPosition().getZ());
objectWorldMatrix = scaleMatrix * rotationMatrix * translationMatrix;
UINT stride = sizeof(MyVertex);
UINT offset = 0;
context->IASetVertexBuffers(0, 1, &buffers->vertexBuffer, &stride, &offset); //set vertex buffer
context->IASetIndexBuffer(buffers->indexBuffer, DXGI_FORMAT_R16_UINT, 0); //set index buffer
//set the constants per object
ConstantBufferStructure constantsPerObject;
//set matrices
DirectX::XMFLOAT4X4 view = myCamera->getView();
DirectX::XMMATRIX camView = XMLoadFloat4x4(&view);
DirectX::XMFLOAT4X4 projection = myCamera->getProjection();
DirectX::XMMATRIX camProjection = XMLoadFloat4x4(&projection);
DirectX::XMMATRIX worldViewProjectionMatrix = objectWorldMatrix * camView * camProjection;
constantsPerObject.worldViewProjection = XMMatrixTranspose(worldViewProjectionMatrix);
constantsPerObject.world = XMMatrixTranspose(objectWorldMatrix);
//draw objects's non-transparent subsets
for(int i=0; i<structure->subsets.size(); i++){
setColorsAndTextures(structure->subsets[i], constantsPerObject, context); //custom method that insert data into constantsPerObject variable
//bind constants per object to constant buffer and send it to vertex and pixel shaders
context->UpdateSubresource(constantBuffer, 0, NULL, &constantsPerObject, 0, 0);
context->VSSetConstantBuffers(0, 1, &constantBuffer);
context->PSSetConstantBuffers(0, 1, &constantBuffer);
context->RSSetState(RSCullDefault);
int start = structure->subsets[i]->getVertexIndexStart();
int count = structure->subsets[i]->getVertexIndexAmmount();
context->DrawIndexed(count, start, 0);
}
The rasterizer:
void RendererDX::initCull(ID3D11Device * device){
D3D11_RASTERIZER_DESC cmdesc;
ZeroMemory(&cmdesc, sizeof(D3D11_RASTERIZER_DESC));
cmdesc.FillMode = D3D11_FILL_SOLID;
cmdesc.CullMode = D3D11_CULL_BACK;
#ifdef GRAPHIC_LEFT_HANDED
cmdesc.FrontCounterClockwise = true;
#else
cmdesc.FrontCounterClockwise = false;
#endif
cmdesc.CullMode = D3D11_CULL_NONE;
//cmdesc.FillMode = D3D11_FILL_WIREFRAME;
HRESULT hr = device->CreateRasterizerState(&cmdesc, &RSCullDefault);
}
edit 3:
The debugger output (so there are some mismatches in the semantics?):
D3D11 ERROR: ID3D11DeviceContext::DrawIndexed: Input Assembler - Vertex Shader linkage error: Signatures between stages are incompatible. The input stage requires Semantic/Index (NORMAL,0) as input, but it is not provided by the output stage. [ EXECUTION ERROR #342: DEVICE_SHADER_LINKAGE_SEMANTICNAME_NOT_FOUND]
D3D11 ERROR: ID3D11DeviceContext::DrawIndexed: Input Assembler - Vertex Shader linkage error: Signatures between stages are incompatible. Semantic 'TEXCOORD' is defined for mismatched hardware registers between the output stage and input stage. [ EXECUTION ERROR #343: DEVICE_SHADER_LINKAGE_REGISTERINDEX]
D3D11 ERROR: ID3D11DeviceContext::DrawIndexed: Input Assembler - Vertex Shader linkage error: Signatures between stages are incompatible. Semantic 'TEXCOORD' in each signature have different min precision levels, when they must be identical. [ EXECUTION ERROR #3146050: DEVICE_SHADER_LINKAGE_MINPRECISION]
I am pretty sure your bytes are misaligned. A float is 4 bytes, so a float4 is 16 bytes, and it wants to be 16-byte aligned. So observe:
layout[0] = { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[1] = { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[2] = { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 20, D3D11_INPUT_PER_VERTEX_DATA, 0 };
The values 0, 12, 20 (AlignedByteOffset) are where each element starts: position starts at 0, and the texcoord starts at the end of a float3, which gives you wrong results. Because look inside the shader:
struct VS_INPUT
{
float4 position : POSITION;
float2 tex : TEXCOORD;
float3 normal : NORMAL;
};
And the normal starts at float3+float2. So generally, you want to align things more consistently, maybe even add "padding" to fill the spaces and keep all the variables 16-byte aligned.
But to keep it simpler for you, you want to switch that declaration to:
layout[0] = { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[1] = { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[2] = { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 };
What happens now? Well, the offsets align themselves automagically, though the result can be less optimal. But one rule of thumb for shaders: try to keep things 16-byte aligned.
Your data structure on upload doesn't match your Input Layout declaration.
Since your data structure for a vertex is:
struct MyVertex{//vertex structure
MyVertex() : weightCount(0), normal(0,0,0){
//textureCoordinates.x = 1;
//textureCoordinates.y = 1;
}
MyVertex(float x, float y, float z, float u, float v, float nx, float ny, float nz)
: position(x, y, z), textureCoordinates(u, v), normal(0,0,0), weightCount(0){
}
DirectX::XMFLOAT3 position;
DirectX::XMFLOAT2 textureCoordinates;
DirectX::XMFLOAT3 normal = DirectX::XMFLOAT3(1.0f, 0.0f, 0.0f);
//will not be sent to shader (and used only by skinned models)
int startWeightIndex;
int weightCount; //=0 means that it's not skinned vertex
};
startWeightIndex and weightCount will be copied into your vertex buffer (even if they do not contain anything useful).
If you check sizeof(MyVertex), you will see a size of 40.
Now let's look at your input layout declaration (whether you use automatic offset or not is irrelevant):
layout[0] = { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[1] = { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[2] = { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 20, D3D11_INPUT_PER_VERTEX_DATA, 0 };
From what you see here, you are declaring a data structure of (12+8+12) = 32 bytes, which of course does not match your vertex size.
So the first vertex will be fetched properly, but the following ones will start to use invalid data (as the Input Assembler doesn't know that your data structure is bigger than what you declared to it).
Two ways to fix it:
1/ Strip your vertex declaration
In that case you modify your vertex structure to match your input declaration (I removed the constructors for brevity):
struct MyVertex
{//vertex structure
DirectX::XMFLOAT3 position;
DirectX::XMFLOAT2 textureCoordinates;
DirectX::XMFLOAT3 normal = DirectX::XMFLOAT3(1.0f, 0.0f, 0.0f);
};
Now your vertex structure exactly matches your declaration, so vertices will be fetched properly.
2/ Adapt your Input Layout declaration:
In that case you change your layout to make sure that all the data contained in your buffer is declared, so it can be taken into account by the Input Assembler (see below).
Now your declaration becomes:
layout[0] = { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[1] = { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[2] = { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 20, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[3] = { "STARTWEIGHTINDEX", 0, DXGI_FORMAT_R32_SINT, 0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[4] = { "WEIGHTCOUNT", 0, DXGI_FORMAT_R32_SINT, 0, 36, D3D11_INPUT_PER_VERTEX_DATA, 0 };
So that means you inform the Input Assembler of all the data that your structure contains.
In that case, even though the data is not needed by your Vertex Shader, since you specified a full data declaration, the Input Assembler will safely ignore STARTWEIGHTINDEX and WEIGHTCOUNT, but will respect your structure's full stride.
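Whichever option you pick, a few compile-time checks make this class of mismatch impossible to reintroduce; a minimal sketch, assuming the original MyVertex definition above (XMFLOAT3 = 12 bytes, XMFLOAT2 = 8 bytes, and the two ints add 8 more, for 40 total):
#include <cstddef> // offsetof

// these mirror the AlignedByteOffset values in the input layout
static_assert(offsetof(MyVertex, position) == 0, "POSITION offset mismatch");
static_assert(offsetof(MyVertex, textureCoordinates) == 12, "TEXCOORD offset mismatch");
static_assert(offsetof(MyVertex, normal) == 20, "NORMAL offset mismatch");
// and the stride handed to IASetVertexBuffers must cover the whole struct
static_assert(sizeof(MyVertex) == 40, "vertex stride mismatch");
With these in place, the stride argument used at IASetVertexBuffers (sizeof(MyVertex) in the question) provably agrees with what the layout declares.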

DirectX Toolkit loaded model with custom HLSL shader Vertex shader input signature

I am working on a project in which I use the DirectX Toolkit to load FBX models. As I understand it, DXTK does not support custom HLSL shaders for its models, so if I want to use my own HLSL shaders for the rendering, I have to get the model information (vertex buffer, index buffer, etc.) from the model object and implement standard Direct3D drawing myself.
My problem is that I can't reach the texture coordinates from the vertex shader. For testing, I set the vertex shader to pass the coordinates through to the pixel shader, where I color the whole object with the texture coordinates like this:
float4(input.texCoord, 0.0f, 1.0f);
As a result, the whole object is black, so the texcoords are (0.0, 0.0) everywhere. I checked the model with the DXTK Model::Draw(...) function, and it is textured the correct way, so my model and my model-loading code seem correct.
I learnt that DXTK model loading uses the following vertex buffer declaration:
const D3D11_INPUT_ELEMENT_DESC VertexPositionNormalTangentColorTexture::InputElements[] =
{
{ "SV_Position", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TANGENT", 0, DXGI_FORMAT_R32G32B32A32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "COLOR", 0, DXGI_FORMAT_R8G8B8A8_UNORM, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
};
So I tried to match my vertex shader input structure like this:
struct VertexShaderInput
{
float3 pos : SV_Position;
float3 normal : NORMAL;
float4 tangent : TANGENT;
float4 color : COLOR;
float2 texCoord : TEXCOORD;
};
Here is how I load the model:
void SceneObject::LoadMesh(
std::shared_ptr<DX::DeviceResources> deviceResources,
const wchar_t* modelFile)
{
auto device = deviceResources->GetD3DDevice();
EffectFactory fx(device);
this->model = Model::CreateFromCMO(device, modelFile, fx, true);
}
And here is my draw function:
void SceneObject::Draw(std::shared_ptr<DX::DeviceResources> deviceResources)
{
auto device = deviceResources->GetD3DDevice();
auto context = deviceResources->GetD3DDeviceContext();
CommonStates states(device);
context->UpdateSubresource(
this->vsConstantBuffer.Get(),
0,
NULL,
&this->vsConstantBufferData,
0,
0);
context->UpdateSubresource(
this->psConstantBuffer.Get(),
0,
NULL,
&this->psConstantBufferData,
0,
0);
//model->Draw(context, states, local, view, proj);
XMVECTOR qid = XMQuaternionIdentity();
const XMVECTORF32 scale = { 1.f, 1.f, 1.f };
const XMVECTORF32 translate = { 0.f, 0.f, 0.f };
//XMVECTOR rotate = XMQuaternionRotationRollPitchYaw(0, XM_PI / 2.f, -XM_PI / 2.f);
XMVECTOR rotate = XMQuaternionRotationRollPitchYaw(0, 0, 0);
XMMATRIX worldMatrix = XMLoadFloat4x4(&this->vsConstantBufferData.model);
XMMATRIX local = XMMatrixMultiply(worldMatrix, XMMatrixTransformation(g_XMZero, qid, scale, g_XMZero, rotate, translate));
//this->model->Draw(context, states, local, XMLoadFloat4x4(&vsConstantBufferData.view), XMLoadFloat4x4(&vsConstantBufferData.projection), false);
XMStoreFloat4x4(&this->vsConstantBufferData.model, local);
for each(auto& mesh in model->meshes)
{
for each (auto& part in mesh->meshParts)
{
context->IASetVertexBuffers(
0,
1,
part->vertexBuffer.GetAddressOf(),
&part->vertexStride,
&part->vertexOffset
);
context->IASetIndexBuffer(
part->indexBuffer.Get(),
part->indexFormat,
0
);
context->IASetPrimitiveTopology(part->primitiveType);
//context->IASetInputLayout(inputLayout.Get());
context->IASetInputLayout(part->inputLayout.Get());
// Attach our vertex shader.
context->VSSetShader(
vertexShader.Get(),
nullptr,
0
);
// Send the constant buffer to the graphics device.
context->VSSetConstantBuffers(
0,
1,
vsConstantBuffer.GetAddressOf()
);
// Attach our pixel shader.
context->PSSetShader(
pixelShader.Get(),
nullptr,
0
);
// Send the constant buffer to the graphics device.
context->PSSetConstantBuffers(
1,
1,
psConstantBuffer.GetAddressOf()
);
context->PSSetShaderResources(0, 1, diffuseTexture.GetAddressOf());
context->PSSetSamplers(0, 1, linearSampler.GetAddressOf());
// Draw the objects.
context->DrawIndexed(
part->indexCount,
part->startIndex,
0
);
}
}
}
If you need more code, you can check my whole project here: https://github.com/GiGu92/WaterRenderingDemo
What am I messing up?
Your code above doesn't indicate where you are creating your vertex shader.
For CMOs, take a look at Src\Shaders\DGSLEffect.fx for how they are used with custom shaders.
The default EffectFactory or DGSLEffectFactory sets up a standard vertex & pixel shader for rendering the Model as a BasicEffect, SkinnedEffect, or DGSLEffect. Details on doing custom rendering are covered in the wiki, but I suggest you first get it rendering with the default effects as you expect. See the tutorial.
You can override the entire pipeline if desired in a number of ways:
Implement your own IEffect* interfaces and IEffectFactory to use with the Model loader
Override the shaders while rendering directly with ModelMesh / ModelMeshParts
For CMO files, the vertex format is indeed either VertexPositionNormalTangentColorTexture or VertexPositionNormalTangentColorTextureSkinning, although for SDKMESH files it is a bit more variable.
As always, ensure you have the debug device enabled and check the HRESULT of any Direct3D function that returns one to ensure you aren't missing some configuration or API usage problem.
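If you end up overriding the shaders while drawing the ModelMeshParts yourself, the input layout bound to the IA has to be created from your own vertex shader's bytecode against the CMO element list; a minimal sketch under those assumptions, where vsBlob is a hypothetical ID3DBlob holding your compiled vertex shader:
// hypothetical: match the CMO vertex format to the custom shader's signature
Microsoft::WRL::ComPtr<ID3D11InputLayout> customLayout;
DX::ThrowIfFailed(device->CreateInputLayout(
    DirectX::VertexPositionNormalTangentColorTexture::InputElements,
    DirectX::VertexPositionNormalTangentColorTexture::InputElementCount,
    vsBlob->GetBufferPointer(),
    vsBlob->GetBufferSize(),
    customLayout.GetAddressOf()));
context->IASetInputLayout(customLayout.Get()); // instead of part->inputLayout.Get()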

Can't create vertex buffer

I have a Windows Phone 8 C#/XAML project with a DirectX component. I'm trying to render some particles. I create a vertex buffer, and I saw the code that creates it run, but when the update code touches the vertex buffer later, the buffer is NULL. I have not released the buffers anywhere. Do you know why this happens? Do any of the output messages help? Thanks.
Printouts and errors on my Output Window:
'TaskHost.exe' (Win32): Loaded '\Device\HarddiskVolume4\Windows\System32\d3d11_1SDKLayers.dll'. Cannot find or open the PDB file.
D3D11 WARNING: ID3D11Texture2D::SetPrivateData: Existing private data of same name with different size found! [ STATE_SETTING WARNING #55: SETPRIVATEDATA_CHANGINGPARAMS]
Create vertex buffer
D3D11 WARNING: ID3D11DeviceContext::DrawIndexed: The Pixel Shader unit expects a Sampler to be set at Slot 0, but none is bound. This is perfectly valid, as a NULL Sampler maps to default Sampler state. However, the developer may not want to rely on the defaults. [ EXECUTION WARNING #352: DEVICE_DRAW_SAMPLER_NOT_SET]
The thread 0xb64 has exited with code 0 (0x0).
m_vertexBuffer is null
D3D11 WARNING: ID3D11DeviceContext::DrawIndexed: The Pixel Shader unit expects a Sampler to be set at Slot 0, but none is bound. This is perfectly valid, as a NULL Sampler maps to default Sampler state. However, the developer may not want to rely on the defaults. [ EXECUTION WARNING #352: DEVICE_DRAW_SAMPLER_NOT_SET]
m_vertexBuffer is null
In CreateDeviceResources, I call CreateVertexShader, CreateInputLayout, CreatePixelShader, and CreateBuffer. Then I get to creating the sampler and the vertex buffer; code below:
auto createCubeTask = (createPSTask && createVSTask).then([this] () {
// Create a texture sampler state description.
D3D11_SAMPLER_DESC samplerDesc;
samplerDesc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
samplerDesc.AddressU = D3D11_TEXTURE_ADDRESS_WRAP;
samplerDesc.AddressV = D3D11_TEXTURE_ADDRESS_WRAP;
samplerDesc.AddressW = D3D11_TEXTURE_ADDRESS_WRAP;
samplerDesc.MipLODBias = 0.0f;
samplerDesc.MaxAnisotropy = 1;
samplerDesc.ComparisonFunc = D3D11_COMPARISON_ALWAYS;
samplerDesc.BorderColor[0] = 0;
samplerDesc.BorderColor[1] = 0;
samplerDesc.BorderColor[2] = 0;
samplerDesc.BorderColor[3] = 0;
samplerDesc.MinLOD = 0;
samplerDesc.MaxLOD = D3D11_FLOAT32_MAX;
// Create the texture sampler state.
HRESULT result = m_d3dDevice->CreateSamplerState(&samplerDesc, &m_sampleState);
if(FAILED(result))
{
OutputDebugString(L"Can't CreateSamplerState");
}
//InitParticleSystem();
// Set the maximum number of vertices in the vertex array.
m_vertexCount = m_maxParticles * 6;
// Set the maximum number of indices in the index array.
m_indexCount = m_vertexCount;
// Create the vertex array for the particles that will be rendered.
m_vertices = new VertexType[m_vertexCount];
if(!m_vertices)
{
OutputDebugString(L"Can't create the vertex array for the particles that will be rendered.");
}
else
{
// Initialize vertex array to zeros at first.
int sizeOfVertexType = sizeof(VertexType);
int totalSizeVertex = sizeOfVertexType * m_vertexCount;
memset(m_vertices, 0, totalSizeVertex);
D3D11_SUBRESOURCE_DATA vertexBufferData = {0};
vertexBufferData.pSysMem = m_vertices;
vertexBufferData.SysMemPitch = 0;
vertexBufferData.SysMemSlicePitch = 0;
int sizeOfMVertices = sizeof(m_vertices);
CD3D11_BUFFER_DESC vertexBufferDesc(
totalSizeVertex, // byteWidth
D3D11_BIND_VERTEX_BUFFER, // bindFlags
D3D11_USAGE_DYNAMIC, // D3D11_USAGE usage = D3D11_USAGE_DEFAULT
D3D11_CPU_ACCESS_WRITE, // cpuaccessFlags
0, // miscFlags
0 // structureByteStride
);
OutputDebugString(L"Create vertex buffer\n");
DX::ThrowIfFailed(
m_d3dDevice->CreateBuffer(
&vertexBufferDesc,
&vertexBufferData,
&m_vertexBuffer
)
);
}
unsigned long* indices = new unsigned long[m_indexCount];
if(!indices)
{
OutputDebugString(L"Can't create the index array.");
}
else
{
// Initialize the index array.
for(int i=0; i<m_indexCount; i++)
{
indices[i] = i;
}
// Set up the description of the static index buffer.
// Create the index array.
D3D11_BUFFER_DESC indexBufferDesc;
indexBufferDesc.Usage = D3D11_USAGE_DEFAULT;
indexBufferDesc.ByteWidth = sizeof(unsigned long) * m_indexCount;
indexBufferDesc.BindFlags = D3D11_BIND_INDEX_BUFFER;
indexBufferDesc.CPUAccessFlags = 0;
indexBufferDesc.MiscFlags = 0;
indexBufferDesc.StructureByteStride = 0;
// Give the subresource structure a pointer to the index data.
D3D11_SUBRESOURCE_DATA indexData;
indexData.pSysMem = indices;
indexData.SysMemPitch = 0;
indexData.SysMemSlicePitch = 0;
// Create the index buffer.
DX::ThrowIfFailed(
m_d3dDevice->CreateBuffer(
&indexBufferDesc,
&indexData,
&m_indexBuffer
)
);
// Release the index array since it is no longer needed.
delete [] indices;
indices = 0;
}
});
createCubeTask.then([this] () {
m_loadingComplete = true;
});
}
My vertex struct holds position, texture, and color:
struct VertexType
{
DirectX::XMFLOAT3 position;
DirectX::XMFLOAT2 texture;
DirectX::XMFLOAT4 color;
};
Microsoft::WRL::ComPtr<ID3D11SamplerState> m_sampleState;
VertexType* m_vertices;
Microsoft::WRL::ComPtr<ID3D11Buffer> m_vertexBuffer;
const D3D11_INPUT_ELEMENT_DESC vertexDesc[] =
{
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "COLOR", 0, DXGI_FORMAT_R32G32B32A32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
};
VertexShader.HLSL:
cbuffer ModelViewProjectionConstantBuffer : register(b0)
{
matrix model;
matrix view;
matrix projection;
};
struct VertexInputType
{
float4 position : POSITION;
float2 tex : TEXCOORD0;
float4 color : COLOR;
};
struct PixelInputType
{
float4 position : SV_POSITION;
float2 tex : TEXCOORD0;
float4 color : COLOR;
};
PixelInputType main(VertexInputType input)
{
PixelInputType output;
// Change the position vector to be 4 units for proper matrix calculations.
input.position.w = 1.0f;
// Calculate the position of the vertex against the world, view, and projection matrices.
output.position = mul(input.position, model);
output.position = mul(output.position, view);
output.position = mul(output.position, projection);
// Store the texture coordinates for the pixel shader.
output.tex = input.tex;
// Store the particle color for the pixel shader.
output.color = input.color;
return output;
}
After the call to: m_d3dDevice->CreateBuffer(&vertexBufferDesc, &vertexBufferData, &m_vertexBuffer) the m_vertexBuffer is not null.
But when I get to my Update() function, m_vertexBuffer is NULL!
D3D11_MAPPED_SUBRESOURCE mappedResource;
if (m_vertexBuffer == nullptr)
{
OutputDebugString(L"m_vertexBuffer is null\n");
}
else
{
// Lock the vertex buffer.
DX::ThrowIfFailed(m_d3dContext->Map(m_vertexBuffer.Get(), 0, D3D11_MAP_WRITE_DISCARD, 0, &mappedResource));
// Get a pointer to the data in the vertex buffer.
VertexType * verticesPtr = (VertexType*)mappedResource.pData;
//// Copy the data into the vertex buffer.
int sizeOfVertices = sizeof(VertexType) * m_vertexCount;
memcpy(verticesPtr, (void*)m_vertices, sizeOfVertices);
//// Unlock the vertex buffer.
m_d3dContext->Unmap(m_vertexBuffer.Get(), 0);
}
About the sampler: you need to bind it to your pixel shader.
m_d3dContext.Get()->PSSetSamplers(0,1,&m_sampleState);
I was making an incorrect call on m_vertexBuffer when rendering it: &m_vertexBuffer goes through ComPtr's operator&, which releases the interface before handing out its address, so the buffer was being nulled out on every draw.
This fixes the error with m_vertexBuffer being null.
Previous:
m_d3dContext.Get()->IASetVertexBuffers(0, 1, &m_vertexBuffer, &stride, &offset);
Fix:
m_d3dContext.Get()->IASetVertexBuffers(0, 1, m_vertexBuffer.GetAddressOf(), &stride, &offset);
It doesn't fix the error with the sampler states though:
DrawIndexed: The Pixel Shader unit expects a Sampler to be set at Slot 0
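That remaining warning is likely the same ComPtr pitfall: m_sampleState is a Microsoft::WRL::ComPtr, so passing &m_sampleState releases the sampler before binding it. A sketch of the corrected call, assuming the members declared earlier:
// GetAddressOf() returns the address without releasing the interface
m_d3dContext.Get()->PSSetSamplers(0, 1, m_sampleState.GetAddressOf());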

shader.hlsl file causes error?

I'm working on trying to get this DirectX 11 project to load a triangle on the screen that uses a shader.hlsl file to color the triangle based on the positions within it (it is a multi-colored triangle that blends together). I am not getting a normal output error this time either. Not sure how to approach/handle this one.
//function that invokes the shaders.hlsl file
void InitPipeline()
{
// load and compile the two shaders
ID3D10Blob *VS, *PS;
D3DX11CompileFromFile("shaders.hlsl", 0, 0, "VShader", "vs_5_0", 0, 0, 0, &VS, 0, 0);
D3DX11CompileFromFile("shaders.hlsl", 0, 0, "PShader", "ps_5_0", 0, 0, 0, &PS, 0, 0);
// encapsulate both shaders into shader objects
dev->CreateVertexShader(VS->GetBufferPointer(), VS->GetBufferSize(), NULL, &pVS);
dev->CreatePixelShader(PS->GetBufferPointer(), PS->GetBufferSize(), NULL, &pPS);
// set the shader objects
devcon->VSSetShader(pVS, 0, 0);
devcon->PSSetShader(pPS, 0, 0);
// create the input layout object
D3D11_INPUT_ELEMENT_DESC ied[] =
{
{"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"COLOR", 0, DXGI_FORMAT_R32G32B32A32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
};
dev->CreateInputLayout(ied, 2, VS->GetBufferPointer(), VS->GetBufferSize(), &pLayout);
devcon->IASetInputLayout(pLayout);
}
struct VOut {
float4 position : SV_POSITION;
float4 color : COLOR;
};
VOut VShader(float4 position : POSITION, float4 color : COLOR) {
VOut output;
output.position = position;
output.color = color;
return output;
}
float4 PShader(float4 position : SV_POSITION, float4 color : COLOR) : SV_TARGET {
return color;
}
Are you using Visual Studio? Right-click shaders.hlsl, find the complete path, and replace "shaders.hlsl" in your D3DX11CompileFromFile calls with the absolute path. Make sure to replace \ characters with \\ so the path is escaped properly.
Visual Studio leaves shaders.hlsl in the same location as the source code, which means that your compiled program can't find it at runtime. Is the blue window still displaying? That's the issue I was having.
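For illustration, a sketch combining both suggestions, with the compiler's error blob captured instead of discarded so compile failures actually print something (the path here is obviously a made-up example):
ID3D10Blob *VS = nullptr, *errors = nullptr;
// absolute path with escaped backslashes
HRESULT hr = D3DX11CompileFromFile("C:\\Projects\\MyApp\\shaders.hlsl",
    0, 0, "VShader", "vs_5_0", 0, 0, 0, &VS, &errors, 0);
if (FAILED(hr) && errors)
{
    // the HLSL compiler's diagnostics land here instead of being silently dropped
    OutputDebugStringA((char*)errors->GetBufferPointer());
    errors->Release();
}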