shader.hlsl file causes error? - c++

I'm working on getting this DirectX 11 project to load a triangle on the screen that uses a shaders.hlsl file to color the triangle based on the positions within it (a multi-colored triangle whose colors blend together). I'm not getting a normal output error this time either, so I'm not sure how to approach/handle this one.
//function that invokes the shaders.hlsl file
void InitPipeline()
{
// load and compile the two shaders
ID3D10Blob *VS, *PS;
D3DX11CompileFromFile("shaders.hlsl", 0, 0, "VShader", "vs_5_0", 0, 0, 0, &VS, 0, 0);
D3DX11CompileFromFile("shaders.hlsl", 0, 0, "PShader", "ps_5_0", 0, 0, 0, &PS, 0, 0);
// encapsulate both shaders into shader objects
dev->CreateVertexShader(VS->GetBufferPointer(), VS->GetBufferSize(), NULL, &pVS);
dev->CreatePixelShader(PS->GetBufferPointer(), PS->GetBufferSize(), NULL, &pPS);
// set the shader objects
devcon->VSSetShader(pVS, 0, 0);
devcon->PSSetShader(pPS, 0, 0);
// create the input layout object
D3D11_INPUT_ELEMENT_DESC ied[] =
{
{"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"COLOR", 0, DXGI_FORMAT_R32G32B32A32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
};
dev->CreateInputLayout(ied, 2, VS->GetBufferPointer(), VS->GetBufferSize(), &pLayout);
devcon->IASetInputLayout(pLayout);
}
struct VOut {
float4 position : SV_POSITION;
float4 color : COLOR;
};
VOut VShader(float4 position : POSITION, float4 color : COLOR) {
VOut output;
output.position = position;
output.color = color;
return output;
}
float4 PShader(float4 position : SV_POSITION, float4 color : COLOR) : SV_TARGET {
return color;
}

Are you using Visual Studio? Right-click shaders.hlsl, find its complete path, and replace "shaders.hlsl" in your D3DX11CompileFromFile calls with that absolute path. Make sure to replace \ characters with \\ so they are escaped properly.
Visual Studio leaves shaders.hlsl next to the source code, so when the program runs from a different working directory it can't find the file. Is the blue window still displaying? That's the issue I was having.
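As a related note, the compile calls above throw away both the HRESULT and the error blob, which is why a missing file or a shader syntax error produces no visible message. A minimal sketch of how you might surface them (the errors variable is just illustrative):
ID3D10Blob *VS = nullptr, *PS = nullptr, *errors = nullptr;
HRESULT hr = D3DX11CompileFromFile("shaders.hlsl", 0, 0, "VShader", "vs_5_0", 0, 0, 0, &VS, &errors, 0);
if (FAILED(hr))
{
    // a bad path or a compile error lands here; the blob holds the compiler's message text
    if (errors)
    {
        OutputDebugStringA((char*)errors->GetBufferPointer());
        errors->Release();
    }
    return; // bail out instead of dereferencing a null VS blob below
}
The same check applies to the pixel shader call.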

Related

Modify a IDirect3DSurface9* with shader

I got an IDirect3DSurface9* from a DXVA2 video decoder. I'd like to modify that surface with a shader. I'm able to render the video frames without a shader by "drawing" the surface into the back buffer.
I use the following code to render the frames without shader:
void Draw(Example* exps){
IDirect3DSurface9* backbuffer;
hwctx->d3d9device->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 0), 1.0f, 0);
hwctx->d3d9device->BeginScene();
hwctx->swap_chain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &backbuffer);
hwctx->d3d9device->StretchRect(videoSurface, NULL, backbuffer, NULL, D3DTEXF_LINEAR);
hwctx->d3d9device->EndScene();
hwctx->swap_chain->Present(0, 0, 0, 0, 0);
backbuffer->Release();
}
Up until here, everything works.
I would like to modify the Draw function to render video frames with the following shader:
uniform extern float4x4 gWVP;
uniform extern texture gTexRGB;
uniform extern texture gTexAlpha;
sampler TexRGB = sampler_state
{
Texture = <gTexRGB>;
AddressU = WRAP;
AddressV = WRAP;
};
sampler TexAlpha = sampler_state
{
Texture = <gTexAlpha>;
AddressU = WRAP;
AddressV = WRAP;
};
struct OutputVS
{
float4 posH : POSITION0;
float2 tex0 : TEXCOORD0;
};
OutputVS DirLightTexVS(float3 posL : POSITION0, float3 normalL : NORMAL0, float2 tex0: TEXCOORD0)
{
// Zero out our output.
OutputVS outVS = (OutputVS)0;
// Transform to homogeneous clip space.
outVS.posH = mul(float4(posL, 1.0f), gWVP);
// Pass on texture coordinates to be interpolated in rasterization.
outVS.tex0 = tex0;
// Done--return the output.
return outVS;
}
float4 DirLightTexPS(float4 c : COLOR0, float4 spec : COLOR1, float2 tex0 : TEXCOORD0) : COLOR
{
float3 rgb = tex2D(TexRGB, tex0).rgb;
float alpha = tex2D(TexAlpha, tex0).g;
return float4(rgb, alpha);
}
technique DirLightTexTech
{
pass P0
{
// Specify the vertex and pixel shader associated with this pass.
vertexShader = compile vs_2_0 DirLightTexVS();
pixelShader = compile ps_2_0 DirLightTexPS();
}
}
where gTexRGB is the texture associated with the video frame, while gTexAlpha is another texture that contains alpha values.
How can I pass the decoded surface to the shader? I have never used DirectX 9, so an example would be appreciated and could help me solve the problem.
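One common D3D9 approach (just a sketch, assuming the decoded surface lives in the default pool and is StretchRect-compatible with the chosen format; sizes and names are placeholders) is to create a render-target texture, blit the decoded surface into its level-0 surface, and bind that texture to the effect parameter the sampler reads from:
// render-target texture the shader can sample; width/height/format here are placeholders
IDirect3DTexture9* texRGB = NULL;
d3d9device->CreateTexture(1920, 1080, 1, D3DUSAGE_RENDERTARGET,
                          D3DFMT_X8R8G8B8, D3DPOOL_DEFAULT, &texRGB, NULL);
IDirect3DSurface9* surfRGB = NULL;
texRGB->GetSurfaceLevel(0, &surfRGB);
// copy/convert the decoded DXVA2 surface into the texture's surface
d3d9device->StretchRect(videoSurface, NULL, surfRGB, NULL, D3DTEXF_LINEAR);
// the texture is now usable as gTexRGB in the pixel shader
mFX->SetTexture(mhTexRGB, texRGB);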
UPDATE 1:
I created the InitEffect function to load the effect from file
void InitEffect(Example* ctx) {
auto hwctx = ctx->decoder->stream->HWAccelCtx;
// Create the FX from a .fx file.
ID3DXBuffer* errors = 0;
D3DXCreateEffectFromFile(hwctx->d3d9device, "basicEffect.fx", 0, 0, D3DXSHADER_DEBUG, 0, &ctx->mFX, &errors);
if (errors)
MessageBox(0, (LPCSTR)errors->GetBufferPointer(), 0, 0);
// Obtain handles.
ctx->mhTech = ctx->mFX->GetTechniqueByName("DirLightTexTech");
ctx->mhWVP = ctx->mFX->GetParameterByName(0, "gWVP");
ctx->mhTexAlpha = ctx->mFX->GetParameterByName(0, "gTexAlpha");
ctx->mhTexRGB = ctx->mFX->GetParameterByName(0, "gTexRGB");
}
and changed the rendering function to:
void Draw(Example* ctx) {
InitMatrices(ctx);
IDirect3DSurface9* backbuffer;
hwctx->d3d9device->Clear(0, 0, D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER, 0xffeeeeee, 1.0f, 0);
hwctx->d3d9device->BeginScene();
ctx->mFX->SetTechnique(ctx->mhTech);
ctx->mFX->SetMatrix(ctx->mhWVP, &(ctx->mCrateWorld*ctx->mView*ctx->mProj));
ctx->texRGB->GetSurfaceLevel(0, &ctx->surfRGB);
hwctx->d3d9device->SetRenderTarget(0, ctx->surfRGB);
hwctx->d3d9device->StretchRect((IDirect3DSurface9*)s->frame->data[3], NULL, ctx->surfRGB, NULL, D3DTEXF_LINEAR);
ctx->mFX->SetTexture(ctx->mhTexAlpha, ctx->texAlpha);
ctx->mFX->SetTexture(ctx->mhTexRGB, ctx->texRGB);
// Enable alpha blending.
hwctx->d3d9device->SetRenderState(D3DRS_ALPHABLENDENABLE, true);
hwctx->d3d9device->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_SRCALPHA);
hwctx->d3d9device->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_INVSRCALPHA);
hwctx->d3d9device->SetVertexDeclaration(VertexPNT::Decl);
hwctx->d3d9device->SetStreamSource(0, ctx->mBoxVB, 0, sizeof(VertexPNT));
hwctx->d3d9device->SetIndices(ctx->mBoxIB);
UINT numPasses = 0;
ctx->mFX->Begin(&numPasses, 0);
for (UINT i = 0; i < numPasses; ++i){
ctx->mFX->BeginPass(i);
hwctx->d3d9device->DrawIndexedPrimitive(D3DPT_TRIANGLELIST, 0, 0, 24, 0, 12);
ctx->mFX->EndPass();
}
ctx->mFX->End();
hwctx->d3d9device->EndScene();
hwctx->swap_chain->Present(0, 0, 0, 0, 0);
backbuffer->Release();
// Disable alpha blending.
hwctx->d3d9device->SetRenderState(D3DRS_ALPHABLENDENABLE, false);
}
but it still doesn't work.
UPDATE 2
I modified the code by following the code Asesh shared. In the InitEffect function I added the following lines to create a render target surface:
ctx->texRGB->GetSurfaceLevel(0, &ctx->surfRGB);
// store original render target
hwctx->d3d9device->GetRenderTarget(0, &ctx->origTarget_);
D3DSURFACE_DESC desc;
ctx->origTarget_->GetDesc(&desc);
// create our surface as render target
hwctx->d3d9device->CreateRenderTarget(1920, 1080, D3DFMT_X8R8G8B8,
desc.MultiSampleType, desc.MultiSampleQuality,
false, &ctx->surfRGB, NULL);
the draw function is:
void drawScene(Example* ctx) {
InitMatrices(ctx);
auto hwctx = ctx->decoder->stream->HWAccelCtx;
auto s = (VdrStreamContext*)ctx->decoder->stream->vdrCodecCtx->opaque;
IDirect3DSurface9* backbuffer;
hwctx->d3d9device->SetRenderTarget(0, ctx->surfRGB);
hwctx->d3d9device->Clear(0, 0, D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER, 0xffeeeeee, 1.0f, 0);
hwctx->d3d9device->BeginScene();
hwctx->d3d9device->StretchRect((IDirect3DSurface9*)s->vdrFrame->data[3], NULL, ctx->surfRGB, NULL, D3DTEXF_NONE);
hwctx->d3d9device->SetRenderTarget(0, ctx->origTarget_);
if (!hwctx->d3d9device->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &backbuffer)) {
hwctx->d3d9device->StretchRect(ctx->surfRGB, NULL, backbuffer, NULL, D3DTEXF_NONE);
}
ctx->mFX->SetTechnique(ctx->mhTech);
ctx->mFX->SetMatrix(ctx->mhWVP, &(ctx->mCrateWorld*ctx->mView*ctx->mProj));
ctx->mFX->SetTexture(ctx->mhTexAlpha, ctx->texAlpha);
ctx->mFX->SetTexture(ctx->mhTexRGB, ctx->texRGB);
// Enable alpha blending.
hwctx->d3d9device->SetRenderState(D3DRS_ALPHABLENDENABLE, true);
hwctx->d3d9device->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_SRCALPHA);
hwctx->d3d9device->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_INVSRCALPHA);
hwctx->d3d9device->SetVertexDeclaration(VertexPNT::Decl);
hwctx->d3d9device->SetStreamSource(0, ctx->mBoxVB, 0, sizeof(VertexPNT));
hwctx->d3d9device->SetIndices(ctx->mBoxIB);
UINT numPasses = 0;
ctx->mFX->Begin(&numPasses, 0);
for (UINT i = 0; i < numPasses; ++i){
ctx->mFX->BeginPass(i);
hwctx->d3d9device->DrawIndexedPrimitive(D3DPT_TRIANGLELIST, 0, 0, 24, 0, 12);
ctx->mFX->EndPass();
}
ctx->mFX->End();
hwctx->d3d9device->EndScene();
hwctx->d3d9device->Present(0, 0, 0, 0);
backbuffer->Release();
// Disable alpha blending.
hwctx->d3d9device->SetRenderState(D3DRS_ALPHABLENDENABLE, false);
}
By drawing into the back buffer with hwctx->d3d9device->StretchRect(ctx->surfRGB, NULL, backbuffer, NULL, D3DTEXF_NONE);, the video frame is shown on screen even though ctx->surfRGB is associated with the texture passed to the shader, but alpha blending is not applied. If I remove that StretchRect call, the video frame is not shown at all, even though ctx->surfRGB is not empty.

DX11 Losing Instance Buffer Data

I've got a function that basically creates different instance buffers into an array for me to use in my DrawIndexedInstanced call.
But when I pass the vertex buffer and instance buffer through to my shader, the instance data is completely lost by the time the shader uses it, so none of my objects are relocated and they all render in the same place.
I've been looking at this for hours and literally cannot find anything helpful.
Creating the Vertex shader input layout:
D3D11_INPUT_ELEMENT_DESC solidColorLayout[] =
{
//Vertex Buffer
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 },
//Instance buffer
{ "INSTANCEPOS", 0, DXGI_FORMAT_R32G32B32_FLOAT, 1, 0, D3D11_INPUT_PER_INSTANCE_DATA, 1 },
{ "INSTANCEROT", 0, DXGI_FORMAT_R32G32B32_FLOAT, 1, 12, D3D11_INPUT_PER_INSTANCE_DATA, 1 },
{ "INSTANCESCA", 0, DXGI_FORMAT_R32G32B32_FLOAT, 1, 24, D3D11_INPUT_PER_INSTANCE_DATA, 1 },
{ "INSTANCETEX", 0, DXGI_FORMAT_R32_FLOAT, 1, 36, D3D11_INPUT_PER_INSTANCE_DATA, 1 }
};
Creating an instance buffer (called multiple times per frame, to create all necessary buffers):
void GameManager::CreateInstanceBuffer(ID3D11Buffer** buffer, Mesh* mesh, std::vector<Instance> instances)
{
D3D11_BUFFER_DESC instBuffDesc;
ZeroMemory(&instBuffDesc, sizeof(instBuffDesc));
instBuffDesc.Usage = D3D11_USAGE_DEFAULT;
instBuffDesc.ByteWidth = sizeof(Instance) * instances.size();
instBuffDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
instBuffDesc.CPUAccessFlags = 0;
instBuffDesc.MiscFlags = 0;
instBuffDesc.StructureByteStride = 0;
int i = sizeof(Instance);
D3D11_SUBRESOURCE_DATA instData;
ZeroMemory(&instData, sizeof(instData));
instData.pSysMem = &instances;
instData.SysMemPitch = 0;
instData.SysMemSlicePitch = 0;
CheckFailWithError(dxManager.GetDevice()->CreateBuffer(&instBuffDesc, &instData, buffer),
"An error occurred whilst building an instance buffer",
"[GameManager]");
meshBuffers.push_back(mesh->GetBuffer(VERTEX_BUFFER));
}
The draw command:
dxManager.GetContext()->DrawIndexedInstanced(instanceIndexCounts[buffer], instanceCounts[buffer], 0, 0, 0);
The shader:
cbuffer cbChangesEveryFrame : register(b0)
{
matrix worldMatrix;
};
cbuffer cbNeverChanges : register(b1)
{
matrix viewMatrix;
};
cbuffer cbChangeOnResize : register(b2)
{
matrix projMatrix;
};
struct VS_Input
{
float4 pos : POSITION;
float2 tex0 : TEXCOORD0;
float4 instancePos : INSTANCEPOS;
float4 instanceRot : INSTANCEROT;
float4 instanceSca : INSTANCESCA;
float instanceTex : INSTANCETEX;
};
PS_Input VS_Main(VS_Input vertex)
{
PS_Input vsOut = (PS_Input)0;
vsOut.pos = mul(vertex.pos + vertex.instancePos, worldMatrix);
vsOut.pos = mul(vsOut.pos, viewMatrix);
vsOut.pos = mul(vsOut.pos, projMatrix);
vsOut.tex0 = vertex.tex0;
return vsOut;
}
I've used the graphics debugger built into Visual Studio. Initially it appeared to be assigning variables in the vertex shader back to front; removing APPEND_ALIGNED_ELEMENT from the AlignedByteOffset fixed that, but the per-instance data still seems to be corrupt and is not being received.
If there is anything else you need let me know and I'll update the post as necessary.
The problem lies in your subresource data.
instData.pSysMem = &instances;
This takes the address of the std::vector object itself, not of the instance data it owns. Try using
instData.pSysMem = &instances[0];
or
instData.pSysMem = &instances.at(0);
That points pSysMem at the first element's memory and will hopefully fix your issue.
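For what it's worth, instances.data() (C++11) expresses the same thing a bit more directly; this is just an equivalent alternative, not a different fix:
// point the initial data at the vector's contiguous element storage
instData.pSysMem = instances.data();
instData.SysMemPitch = 0;
instData.SysMemSlicePitch = 0;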

Normals are not transferred to DirectX 11 shader correctly - random, time-dependent values?

Today I was trying to add normal maps to my DirectX 11 application.
Something went wrong, so I decided to output the normals' values instead of color on the scene objects to "see" where the problem lies.
What surprised me is that the normals' values change very fast (the colors are blinking each frame), and I'm sure that I don't manipulate their values during program execution (the positions of the vertices stay stable, but the normals do not).
Here are two screens for some frames at t1 and t2:
My vertex structure:
struct MyVertex{//vertex structure
MyVertex() : weightCount(0), normal(0,0,0){
//textureCoordinates.x = 1;
//textureCoordinates.y = 1;
}
MyVertex(float x, float y, float z, float u, float v, float nx, float ny, float nz)
: position(x, y, z), textureCoordinates(u, v), normal(0,0,0), weightCount(0){
}
DirectX::XMFLOAT3 position;
DirectX::XMFLOAT2 textureCoordinates;
DirectX::XMFLOAT3 normal = DirectX::XMFLOAT3(1.0f, 0.0f, 0.0f);
//will not be sent to shader (and used only by skinned models)
int startWeightIndex;
int weightCount; //=0 means that it's not skinned vertex
};
The corresponding vertex layout:
layout[0] = { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[1] = { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[2] = { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 20, D3D11_INPUT_PER_VERTEX_DATA, 0 };
Vertex buffer:
D3D11_BUFFER_DESC bd;
ZeroMemory(&bd, sizeof(bd));
bd.Usage = D3D11_USAGE_DEFAULT; //D3D11_USAGE_DYNAMIC
bd.ByteWidth = sizeof(MyVertex) * structure->getVerticesCount();
bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bd.CPUAccessFlags = 0;
D3D11_SUBRESOURCE_DATA InitData;
ZeroMemory(&InitData, sizeof(InitData));
InitData.pSysMem = structure->vertices;
if(device->CreateBuffer(&bd, &InitData, &buffers->vertexBuffer) != S_OK){
return false;
}
And the shader that outputs the normals "as color" (of course, if I set output.normal to float3(1,1,1), the objects stay white):
struct Light
{
float3 diffuse;
float3 position;
float3 direction;
};
cbuffer cbPerObject : register(b0)
{
float4x4 WVP;
float4x4 World;
float4 difColor;
bool hasTexture;
bool hasNormMap;
};
cbuffer cbPerFrame : register(b1)
{
Light light;
};
Texture2D ObjTexture;
Texture2D ObjNormMap;
SamplerState ObjSamplerState;
TextureCube SkyMap;
struct VS_INPUT
{
float4 position : POSITION;
float2 tex : TEXCOORD;
float3 normal : NORMAL;
};
struct VS_OUTPUT
{
float4 Pos : SV_POSITION;
float4 worldPos : POSITION;
float3 normal : NORMAL;
float2 TexCoord : TEXCOORD;
float3 tangent : TANGENT;
};
VS_OUTPUT VS(VS_INPUT input)
{
VS_OUTPUT output;
//input.position.w = 1.0f;
output.Pos = mul(input.position, WVP);
output.worldPos = mul(input.position, World);
output.normal = input.normal;
output.tangent = mul(input.tangent, World);
output.TexCoord = input.tex;
return output;
}
float4 PS(VS_OUTPUT input) : SV_TARGET
{
return float4(input.normal, 1.0);
}
//--------------------------------------------------------------------------------------
// Techniques
//--------------------------------------------------------------------------------------
technique10 RENDER
{
pass P0
{
SetVertexShader( CompileShader( vs_4_0, VS() ) );
SetPixelShader( CompileShader( ps_4_0, PS() ) );
SetBlendState( SrcAlphaBlendingAdd, float4( 0.0f, 0.0f, 0.0f, 0.0f ), 0xFFFFFFFF );
}
}
Where have I made a mistake? Maybe there are other places in the code that can cause this strange behavior (some locking, buffers, dunno...)?
edit:
As 413X suggested, I've run the DirectX Diagnostic:
What is strange is that in the small preview the screen looks the same as in the program. But when I investigate that frame (screenshot), I get completely different colors:
Also, here's something strange - I pick a blue pixel and it says it's black (on the right):
edit 2:
As catflier requested, I'm posting some additional code.
The rendering and buffers binding:
//set the object world matrix
DirectX::XMMATRIX objectWorldMatrix = DirectX::XMMatrixIdentity();
DirectX::XMMATRIX rotationMatrix = DirectX::XMMatrixRotationQuaternion(
DirectX::XMVectorSet(object->getOrientation().getX(), object->getOrientation().getY(), object->getOrientation().getZ(), object->getOrientation().getW())
);
DirectX::XMMATRIX scaleMatrix = (
object->usesScaleMatrix()
? DirectX::XMMatrixScaling(object->getHalfSize().getX(), object->getHalfSize().getY(), object->getHalfSize().getZ())
: DirectX::XMMatrixScaling(1.0f, 1.0f, 1.0f)
);
DirectX::XMMATRIX translationMatrix = DirectX::XMMatrixTranslation(object->getPosition().getX(), object->getPosition().getY(), object->getPosition().getZ());
objectWorldMatrix = scaleMatrix * rotationMatrix * translationMatrix;
UINT stride = sizeof(MyVertex);
UINT offset = 0;
context->IASetVertexBuffers(0, 1, &buffers->vertexBuffer, &stride, &offset); //set vertex buffer
context->IASetIndexBuffer(buffers->indexBuffer, DXGI_FORMAT_R16_UINT, 0); //set index buffer
//set the constants per object
ConstantBufferStructure constantsPerObject;
//set matrices
DirectX::XMFLOAT4X4 view = myCamera->getView();
DirectX::XMMATRIX camView = XMLoadFloat4x4(&view);
DirectX::XMFLOAT4X4 projection = myCamera->getProjection();
DirectX::XMMATRIX camProjection = XMLoadFloat4x4(&projection);
DirectX::XMMATRIX worldViewProjectionMatrix = objectWorldMatrix * camView * camProjection;
constantsPerObject.worldViewProjection = XMMatrixTranspose(worldViewProjectionMatrix);
constantsPerObject.world = XMMatrixTranspose(objectWorldMatrix);
//draw objects's non-transparent subsets
for(int i=0; i<structure->subsets.size(); i++){
setColorsAndTextures(structure->subsets[i], constantsPerObject, context); //custom method that insert data into constantsPerObject variable
//bind constants per object to constant buffer and send it to vertex and pixel shaders
context->UpdateSubresource(constantBuffer, 0, NULL, &constantsPerObject, 0, 0);
context->VSSetConstantBuffers(0, 1, &constantBuffer);
context->PSSetConstantBuffers(0, 1, &constantBuffer);
context->RSSetState(RSCullDefault);
int start = structure->subsets[i]->getVertexIndexStart();
int count = structure->subsets[i]->getVertexIndexAmmount();
context->DrawIndexed(count, start, 0);
}
The rasterizer:
void RendererDX::initCull(ID3D11Device * device){
D3D11_RASTERIZER_DESC cmdesc;
ZeroMemory(&cmdesc, sizeof(D3D11_RASTERIZER_DESC));
cmdesc.FillMode = D3D11_FILL_SOLID;
cmdesc.CullMode = D3D11_CULL_BACK;
#ifdef GRAPHIC_LEFT_HANDED
cmdesc.FrontCounterClockwise = true;
#else
cmdesc.FrontCounterClockwise = false;
#endif
cmdesc.CullMode = D3D11_CULL_NONE;
//cmdesc.FillMode = D3D11_FILL_WIREFRAME;
HRESULT hr = device->CreateRasterizerState(&cmdesc, &RSCullDefault);
}
edit 3:
The debugger output (there are some mismatches in semantics?):
D3D11 ERROR: ID3D11DeviceContext::DrawIndexed: Input Assembler - Vertex Shader linkage error: Signatures between stages are incompatible. The input stage requires Semantic/Index (NORMAL,0) as input, but it is not provided by the output stage. [ EXECUTION ERROR #342: DEVICE_SHADER_LINKAGE_SEMANTICNAME_NOT_FOUND]
D3D11 ERROR: ID3D11DeviceContext::DrawIndexed: Input Assembler - Vertex Shader linkage error: Signatures between stages are incompatible. Semantic 'TEXCOORD' is defined for mismatched hardware registers between the output stage and input stage. [ EXECUTION ERROR #343: DEVICE_SHADER_LINKAGE_REGISTERINDEX]
D3D11 ERROR: ID3D11DeviceContext::DrawIndexed: Input Assembler - Vertex Shader linkage error: Signatures between stages are incompatible. Semantic 'TEXCOORD' in each signature have different min precision levels, when they must bet identical. [ EXECUTION ERROR #3146050: DEVICE_SHADER_LINKAGE_MINPRECISION]
I am pretty sure your bytes are misaligned. A float is 4 bytes, I think, and a float4 is then 16 bytes, and it wants to be 16-byte aligned. So observe:
layout[0] = { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[1] = { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[2] = { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 20, D3D11_INPUT_PER_VERTEX_DATA, 0 };
The values 0, 12, 20 (AlignedByteOffset) are where each element starts. Which would mean: POSITION starts at 0, TEXCOORD starts at the end of a float3, and that gives you wrong results. Because look inside the shader:
struct VS_INPUT
{
float4 position : POSITION;
float2 tex : TEXCOORD;
float3 normal : NORMAL;
};
And NORMAL at float3+float2. So generally, you want to align things more consistently, maybe even add "padding" to fill the gaps and keep all the variables 16-byte aligned.
But to keep it simpler for you, you want to switch that statement to:
layout[0] = { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[1] = { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[2] = { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 };
What happens now? Well, the thing aligns itself automagically, although it can be less optimal. But one thing about shaders: try to keep things 16-byte aligned.
Your data structure on upload doesn't match your Input Layout declaration.
Since your data structure for a vertex is:
struct MyVertex{//vertex structure
MyVertex() : weightCount(0), normal(0,0,0){
//textureCoordinates.x = 1;
//textureCoordinates.y = 1;
}
MyVertex(float x, float y, float z, float u, float v, float nx, float ny, float nz)
: position(x, y, z), textureCoordinates(u, v), normal(0,0,0), weightCount(0){
}
DirectX::XMFLOAT3 position;
DirectX::XMFLOAT2 textureCoordinates;
DirectX::XMFLOAT3 normal = DirectX::XMFLOAT3(1.0f, 0.0f, 0.0f);
//will not be sent to shader (and used only by skinned models)
int startWeightIndex;
int weightCount; //=0 means that it's not skinned vertex
};
startWeightIndex and weightCount will be copied into your vertex buffer (even if they do not contain anything useful).
If you check sizeof(MyVertex), you will get a size of 40.
Now let's look at your input layout declaration (whether you use automatic offset or not is irrelevant):
layout[0] = { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[1] = { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[2] = { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 20, D3D11_INPUT_PER_VERTEX_DATA, 0 };
From what you see here, you are declaring a data structure of (12+8+12) = 32 bytes, which of course does not match your vertex size.
So the first vertex will be fetched properly, but the next ones will start to use invalid data (as the Input Assembler doesn't know that your data structure is bigger than what you specified to it).
Two ways to fix it:
1/ Strip your vertex declaration
In that case you modify your vertex structure to match your input declaration (I removed the constructors for brevity):
struct MyVertex
{//vertex structure
DirectX::XMFLOAT3 position;
DirectX::XMFLOAT2 textureCoordinates;
DirectX::XMFLOAT3 normal = DirectX::XMFLOAT3(1.0f, 0.0f, 0.0f);
};
Now your vertex structure exactly matches your declaration, so vertices will be fetched properly.
2/ Adapt your Input Layout declaration:
In that case you change your layout to make sure that all the data contained in your buffer is declared, so it can be taken into account by the Input Assembler (see below).
Now your declaration becomes:
layout[0] = { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[1] = { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[2] = { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 20, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[3] = { "STARTWEIGHTINDEX", 0, DXGI_FORMAT_R32_SINT, 0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0 };
layout[4] = { "WEIGHTCOUNT", 0, DXGI_FORMAT_R32_SINT, 0, 36, D3D11_INPUT_PER_VERTEX_DATA, 0 };
So that means you inform the Input Assembler of all the data that your structure contains.
In that case, even though the data is not needed by your vertex shader, since you specified a full data declaration, the Input Assembler will safely ignore STARTWEIGHTINDEX and WEIGHTCOUNT, but will respect your whole structure's padding.
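A cheap way to catch this kind of stride mismatch early is a compile-time size check next to the layout declaration; this is just a sketch, assuming C++11 and that the layout is meant to cover the whole 40-byte struct:
// POSITION(12) + TEXCOORD(8) + NORMAL(12) + STARTWEIGHTINDEX(4) + WEIGHTCOUNT(4) = 40 bytes
static_assert(sizeof(MyVertex) == 40,
    "MyVertex no longer matches the input layout and the IASetVertexBuffers stride");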

DirectX Toolkit loaded model with custom HLSL shader Vertex shader input signature

I am working on a project in which I use the DirectX Toolkit to load FBX models. As I understand it, DXTK does not support custom HLSL shaders for its models, so I have to get the model information (vertex buffer, index buffer, etc.) from the model object and implement standard Direct3D drawing if I want to use my own HLSL shaders for the rendering.
My problem is that I can't reach the texture coordinates from the vertex shader. For testing, I set the vertex shader to pass the coordinates through to the pixel shader, where I color the whole object with the texture coordinates like this:
float4(input.texCoord, 0.0f, 1.0f);
In the result, the whole object is black, so the texcoords are (0.0, 0.0) everywhere. I checked the model with the DXTK Model::Draw(...) function, and it is textured the correct way, so my model and my model loading code seem correct.
I learnt that DXTK model loading uses the following vertex buffer declaration:
const D3D11_INPUT_ELEMENT_DESC VertexPositionNormalTangentColorTexture::InputElements[] =
{
{ "SV_Position", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TANGENT", 0, DXGI_FORMAT_R32G32B32A32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "COLOR", 0, DXGI_FORMAT_R8G8B8A8_UNORM, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
};
So I tried to match my vertex shader input structure like this:
struct VertexShaderInput
{
float3 pos : SV_Position;
float3 normal : NORMAL;
float4 tangent : TANGENT;
float4 color : COLOR;
float2 texCoord : TEXCOORD;
};
Here is how I load the model:
void SceneObject::LoadMesh(
std::shared_ptr<DX::DeviceResources> deviceResources,
const wchar_t* modelFile)
{
auto device = deviceResources->GetD3DDevice();
EffectFactory fx(device);
this->model = Model::CreateFromCMO(device, modelFile, fx, true);
}
And here is my draw function:
void SceneObject::Draw(std::shared_ptr<DX::DeviceResources> deviceResources)
{
auto device = deviceResources->GetD3DDevice();
auto context = deviceResources->GetD3DDeviceContext();
CommonStates states(device);
context->UpdateSubresource(
this->vsConstantBuffer.Get(),
0,
NULL,
&this->vsConstantBufferData,
0,
0);
context->UpdateSubresource(
this->psConstantBuffer.Get(),
0,
NULL,
&this->psConstantBufferData,
0,
0);
//model->Draw(context, states, local, view, proj);
XMVECTOR qid = XMQuaternionIdentity();
const XMVECTORF32 scale = { 1.f, 1.f, 1.f };
const XMVECTORF32 translate = { 0.f, 0.f, 0.f };
//XMVECTOR rotate = XMQuaternionRotationRollPitchYaw(0, XM_PI / 2.f, -XM_PI / 2.f);
XMVECTOR rotate = XMQuaternionRotationRollPitchYaw(0, 0, 0);
XMMATRIX worldMatrix = XMLoadFloat4x4(&this->vsConstantBufferData.model);
XMMATRIX local = XMMatrixMultiply(worldMatrix, XMMatrixTransformation(g_XMZero, qid, scale, g_XMZero, rotate, translate));
//this->model->Draw(context, states, local, XMLoadFloat4x4(&vsConstantBufferData.view), XMLoadFloat4x4(&vsConstantBufferData.projection), false);
XMStoreFloat4x4(&this->vsConstantBufferData.model, local);
for each(auto& mesh in model->meshes)
{
for each (auto& part in mesh->meshParts)
{
context->IASetVertexBuffers(
0,
1,
part->vertexBuffer.GetAddressOf(),
&part->vertexStride,
&part->vertexOffset
);
context->IASetIndexBuffer(
part->indexBuffer.Get(),
part->indexFormat,
0
);
context->IASetPrimitiveTopology(part->primitiveType);
//context->IASetInputLayout(inputLayout.Get());
context->IASetInputLayout(part->inputLayout.Get());
// Attach our vertex shader.
context->VSSetShader(
vertexShader.Get(),
nullptr,
0
);
// Send the constant buffer to the graphics device.
context->VSSetConstantBuffers(
0,
1,
vsConstantBuffer.GetAddressOf()
);
// Attach our pixel shader.
context->PSSetShader(
pixelShader.Get(),
nullptr,
0
);
// Send the constant buffer to the graphics device.
context->PSSetConstantBuffers(
1,
1,
psConstantBuffer.GetAddressOf()
);
context->PSSetShaderResources(0, 1, diffuseTexture.GetAddressOf());
context->PSSetSamplers(0, 1, linearSampler.GetAddressOf());
// Draw the objects.
context->DrawIndexed(
part->indexCount,
part->startIndex,
0
);
}
}
}
If you need more code, you can check my whole project here: https://github.com/GiGu92/WaterRenderingDemo
What am I messing up?
Your code above doesn't indicate where you are creating your vertex shader.
For CMOs, take a look at Src\Shaders\DGSLEffect.fx for how they are used with custom shaders.
The default EffectFactory or DGSLEffectFactory sets up a standard vertex & pixel shader for rendering the Model as a BasicEffect, SkinnedEffect, or DGSLEffect. Details on doing custom rendering are covered in the wiki, but I suggest you first get it rendering with the default effects as you expect. See the tutorial.
You can override the entire pipeline if desired in a number of ways:
Implement your own IEffect* interfaces and IEffectFactory to use with the Model loader
Override the shaders while rendering directly with ModelMesh / ModelMeshParts
For CMO files, the vertex format is indeed either VertexPositionNormalTangentColorTexture or VertexPositionNormalTangentColorTextureSkinning, although for SDKMESH files it is a bit more variable.
As always, ensure you have the debug device enabled and check the HRESULT of any Direct3D function that returns one to ensure you aren't missing some configuration or API usage problem.
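If you do go the custom-shader route, remember that an input layout has to be created from your own vertex shader's bytecode against the CMO vertex declaration. A minimal sketch, assuming vsBlob holds your compiled vertex shader and that DirectXTK's VertexPositionNormalTangentColorTexture exposes InputElements/InputElementCount as in its VertexTypes header:
using namespace DirectX;
Microsoft::WRL::ComPtr<ID3D11InputLayout> customLayout;
HRESULT hr = device->CreateInputLayout(
    VertexPositionNormalTangentColorTexture::InputElements,
    VertexPositionNormalTangentColorTexture::InputElementCount,
    vsBlob->GetBufferPointer(), vsBlob->GetBufferSize(),
    customLayout.GetAddressOf());
// on success, bind customLayout instead of part->inputLayout when drawing with your own shaders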

HLSL shader shows weird colors

I am trying to create a shader where I have as input the position of the vertex, some transformation matrices, and a float4 for the color of the vertex. The manipulation of the position works fine, but I don't get the correct color out of it.
Okay, so here is the input layout of the shader:
D3D11_INPUT_ELEMENT_DESC solidColorLayout[] =
{
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "COLOR", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 }
};
And the shader itself does look like this:
cbuffer cbChangesEveryFrame : register(b0)
{
matrix worldMatrix;
};
cbuffer cbNeverChanges : register(b1)
{
matrix viewMatrix;
};
cbuffer cbChangeOnResize : register(b2)
{
matrix projMatrix;
};
struct VS_Input
{
float4 pos : POSITION;
float4 color : COLOR;
};
struct PS_Input
{
float4 pos: SV_POSITION;
float4 color: COLOR;
};
PS_Input VS_Main(VS_Input vert)
{
PS_Input vsout = (PS_Input)0;
vsout.color = vert.color;
float4 worldPos = mul(vert.pos, worldMatrix);
vsout.pos = mul(worldPos, viewMatrix);
vsout.pos = mul(vsout.pos, projMatrix);
return vsout;
}
float4 PS_Main(PS_Input psinput) : SV_TARGET
{
return psinput.color;
}
Don't get confused about the matrices; those transformations are correct. I do get the right vertex positions and so on, but I don't get the color I define.
So for example I create vertices like this:
struct VertexPos
{
XMFLOAT3 pos;
XMFLOAT4 color;
};
...
VertexPos vertices[] =
{
{ XMFLOAT3(-1.0f, 0.0f, -1.0f), XMFLOAT4(0.1f,0.1f, 0.1f, 0.1f)},
{ XMFLOAT3(1.0f, 0.0f, -1.0f), XMFLOAT4(0.1f, 0.1f, 0.1f, 0.1f) },
{ XMFLOAT3(1.0f, 0.0f, 1.0f), XMFLOAT4(0.1f, 0.1f, 0.1f, 0.1f) },
{ XMFLOAT3(-1.0f, 0.0f, 1.0f), XMFLOAT4(0.1f, 0.1f, 0.1f, 0.1f) },
};
With some index buffer; and the draw call itself is pretty simple:
unsigned int stride = sizeof(VertexPos);
unsigned int offset = 0;
//set inputlayout and topology for drawing the sprites
context->IASetVertexBuffers(0, 1, &m_vertexBuffer, &stride, &offset);
context->IASetIndexBuffer(m_indexBuffer, DXGI_FORMAT_R16_UINT, 0);
//calculate cube stuff
XMMATRIX worldMat = getWorldMatrix();
worldMat = XMMatrixTranspose(worldMat);
context->UpdateSubresource(worldBuffer, 0, 0, &worldMat, 0, 0);//update world matrix
//draw
context->DrawIndexed(6, 0, 0);
So I wonder what's wrong with it? (See the lines and the small faces at the side; they should have the color.)
Got it:
My D3D11_INPUT_ELEMENT_DESC was wrong. I forgot the AlignedByteOffset. The correct offset should be:
D3D11_INPUT_ELEMENT_DESC solidColorLayout[] =
{
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "COLOR", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 }
};
That little missing 12 ruined it. But it works fine now.
Or even change it to this:
D3D11_INPUT_ELEMENT_DESC solidColorLayout[] =
{
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "COLOR", 0, DXGI_FORMAT_R32G32B32A32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 }
};
Alternatively, you can use D3D11_APPEND_ALIGNED_ELEMENT and it will automatically calculate the right values. (Not sure if it's "safe".)
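For reference, that variant would look like the sketch below. D3D11_APPEND_ALIGNED_ELEMENT is a documented value for AlignedByteOffset, so it is safe as long as the elements really are packed back to back in the vertex struct:
D3D11_INPUT_ELEMENT_DESC solidColorLayout[] =
{
    { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "COLOR", 0, DXGI_FORMAT_R32G32B32A32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 }
};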