DirectX 11: Two vertex buffers and two input slots - C++

I have a problem with setting up two separate vertex buffers (and input slots). One buffer must contain the vertices, the second the color data. I found this problem discussed here:
Direct3D multiple vertex buffers, non interleaved elements
So I followed these instructions but got an error message box:
Error Code: E_INVALIDARG (0x80070057)
Calling: md3dDevice->CreateBuffer(&vbd2, &initData2, &mBoxVB2)
Here's the code:
//layout array
D3D11_INPUT_ELEMENT_DESC vertexDesc3[] =
{
{"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"COLOR", 0, DXGI_FORMAT_R32G32B32A32_FLOAT, 1, 0, D3D11_INPUT_PER_VERTEX_DATA, 0}
};
D3DX11_PASS_DESC passDesc;
mTech->GetPassByIndex(0)->GetDesc(&passDesc);
HR(md3dDevice->CreateInputLayout(vertexDesc3, 2, passDesc.pIAInputSignature,
passDesc.IAInputSignatureSize, &mInputLayout));
// buffers
ID3D11Buffer* mBoxVB;
ID3D11Buffer* mBoxVB2;
ID3D11Buffer* buffers[2];
buffers[0] = mBoxVB;
buffers[1] = mBoxVB2;
XMFLOAT3 vertex[] =
{
XMFLOAT3(-1.0f, -1.0f, -1.0f) ,
XMFLOAT3(-1.0f, +1.0f, -1.0f) ,
XMFLOAT3(+1.0f, +1.0f, -1.0f) ,
XMFLOAT3(+1.0f, -1.0f, -1.0f) ,
XMFLOAT3(-1.0f, -1.0f, +1.0f) ,
XMFLOAT3(-1.0f, +1.0f, +1.0f) ,
XMFLOAT3(+1.0f, +1.0f, +1.0f) ,
XMFLOAT3(+1.0f, -1.0f, +1.0f)
};
// vertex buffer
D3D11_BUFFER_DESC vbd;
vbd.Usage = D3D11_USAGE_IMMUTABLE;
vbd.ByteWidth = sizeof(XMFLOAT3) * 8;
vbd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
vbd.CPUAccessFlags = 0;
vbd.MiscFlags = 0;
vbd.StructureByteStride = 0;
D3D11_SUBRESOURCE_DATA vinitData;
vinitData.pSysMem = vertex;
HR(md3dDevice->CreateBuffer(&vbd, &vinitData, &mBoxVB));
XMFLOAT4 color[] =
{
(const float*)&Colors::White ,
(const float*)&Colors::Black ,
(const float*)&Colors::Red ,
(const float*)&Colors::Green ,
(const float*)&Colors::Blue ,
(const float*)&Colors::Yellow ,
(const float*)&Colors::Cyan ,
(const float*)&Colors::Magenta
};
// where the namespace Colors is defined like
namespace Colors
{
XMGLOBALCONST XMVECTORF32 White = {1.0f, 1.0f, 1.0f, 1.0f};
XMGLOBALCONST XMVECTORF32 Black = {0.0f, 0.0f, 0.0f, 1.0f};
XMGLOBALCONST XMVECTORF32 Red = {1.0f, 0.0f, 0.0f, 1.0f};
XMGLOBALCONST XMVECTORF32 Green = {0.0f, 1.0f, 0.0f, 1.0f};
XMGLOBALCONST XMVECTORF32 Blue = {0.0f, 0.0f, 1.0f, 1.0f};
XMGLOBALCONST XMVECTORF32 Yellow = {1.0f, 1.0f, 0.0f, 1.0f};
XMGLOBALCONST XMVECTORF32 Cyan = {0.0f, 1.0f, 1.0f, 1.0f};
XMGLOBALCONST XMVECTORF32 Magenta = {1.0f, 0.0f, 1.0f, 1.0f};
};
// color buffer
D3D11_BUFFER_DESC vbd2;
vbd2.Usage = D3D11_USAGE_IMMUTABLE;
vbd2.ByteWidth = sizeof(XMFLOAT4) * 8;
vbd2.BindFlags = D3D11_BIND_VERTEX_BUFFER;
vbd2.CPUAccessFlags = 0;
vbd2.MiscFlags = 0;
vbd2.StructureByteStride = 0;
D3D11_SUBRESOURCE_DATA initData2;
initData2.pSysMem = color;
// here is our problem:
HR(md3dDevice->CreateBuffer(&vbd2, &initData2, &mBoxVB2));
// inside DrawScene():
UINT stride[] = {sizeof(XMFLOAT3), sizeof(XMFLOAT4)};
UINT offset[] = {0,0};
md3dImmediateContext->IASetVertexBuffers(0, 2, buffers, stride, offset);
md3dImmediateContext->IASetIndexBuffer(mBoxIB, DXGI_FORMAT_R32_UINT, 0);
// and the shaders
cbuffer cbPerObject
{
float4x4 gWorldViewProj;
};
struct VertexIn
{
float3 PosL : POSITION;
float4 Color : COLOR;
};
struct VertexOut
{
float4 PosH : SV_POSITION;
float4 Color : COLOR;
};
VertexOut VS(VertexIn vin)
{
VertexOut vout;
vout.PosH = mul(float4(vin.PosL, 1.0f), gWorldViewProj);
vout.Color = vin.Color;
return vout;
}
float4 PS(VertexOut pin) : SV_Target
{
return pin.Color;
}
technique11 ColorTech
{
pass P0
{
SetVertexShader( CompileShader( vs_5_0, VS() ) );
SetGeometryShader( NULL );
SetPixelShader( CompileShader( ps_5_0, PS() ) );
}
}
What am I doing wrong?
BuildFX():
DWORD shaderFlags = 0;
#if defined( DEBUG ) || defined( _DEBUG )
shaderFlags |= D3D10_SHADER_DEBUG;
shaderFlags |= D3D10_SHADER_SKIP_OPTIMIZATION;
#endif
ID3D10Blob* compiledShader = 0;
ID3D10Blob* compilationMsgs = 0;
HRESULT hr = D3DX11CompileFromFile(L"FX/color.fx", 0, 0, 0, "fx_5_0", shaderFlags,
0, 0, &compiledShader, &compilationMsgs, 0);
// compilationMsgs can store errors or warnings.
if( compilationMsgs != 0 )
{
MessageBoxA(0, (char*)compilationMsgs->GetBufferPointer(), 0, 0);
ReleaseCOM(compilationMsgs);
}
// Even if there are no compilationMsgs, check to make sure there were no other errors.
if(FAILED(hr))
{
DXTrace(__FILE__, (DWORD)__LINE__, hr, L"D3DX11CompileFromFile", true);
}
HR(D3DX11CreateEffectFromMemory(compiledShader->GetBufferPointer(), compiledShader->GetBufferSize(),
0, md3dDevice, &mFX));
// Done with compiled shader.
ReleaseCOM(compiledShader);
mTech = mFX->GetTechniqueByName("ColorTech");
mfxWorldViewProj = mFX->GetVariableByName("gWorldViewProj")->AsMatrix();

vbd2 is uninitialised.
You've copy/pasted the code from above and not changed vbd to vbd2.
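A pattern that makes this class of bug harder to write is to zero the whole descriptor before filling in the fields you care about, so a forgotten (or mis-named) field holds 0 instead of stack garbage. A minimal sketch using the names from the question:
D3D11_BUFFER_DESC vbd2 = {};                  // value-initialise: every field starts at zero
vbd2.Usage = D3D11_USAGE_IMMUTABLE;
vbd2.ByteWidth = sizeof(XMFLOAT4) * 8;        // 8 colors, one per vertex
vbd2.BindFlags = D3D11_BIND_VERTEX_BUFFER;    // CPUAccessFlags, MiscFlags, StructureByteStride stay 0
D3D11_SUBRESOURCE_DATA initData2 = {};        // SysMemPitch / SysMemSlicePitch stay 0
initData2.pSysMem = color;
HR(md3dDevice->CreateBuffer(&vbd2, &initData2, &mBoxVB2));
(ZeroMemory(&vbd2, sizeof(vbd2)) achieves the same thing.)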

Related

How to draw/render different shapes in DirectX 11

I have been trying to learn the basics of DirectX 11 programming using the MSDN tutorial05 sample code and I have run into an issue I cannot find a solution for on the internet (that I could see anyway). Basically I am trying to draw and render a player cube object, complete with user input, and pyramid-like objects that the player must collect.
My issue is that when I am rendering the scene, only the cube vertex (and index) data is being read, so all objects are cubes when they shouldn't be.
This is the function where the vertex data is made:
PyramidVertex Pyramid[] =
{
// Square base of the pyramid
{ XMFLOAT3( -0.5f, -0.5f, 0.5f), XMFLOAT4(0.0f, 1.0f, 0.0f, 1.0f) },
{ XMFLOAT3( 0.5f, -0.5f, 0.5f), XMFLOAT4(0.0f, 0.0f, 1.0f, 1.0f) },
{ XMFLOAT3(-0.5f, -0.5f, -0.5f), XMFLOAT4(1.0f, 0.0f, 0.0f, 1.0f) },
{ XMFLOAT3(0.5f, -0.5f, -0.5f), XMFLOAT4(0.0f, 1.0f, 1.0f, 1.0f) },
// The tip of the pyramid
{ XMFLOAT3(0.0f, 0.5f, 0.0f), XMFLOAT4(0.0f, 1.0f, 0.0f, 1.0f) },
};
D3D11_BUFFER_DESC bdP;
ZeroMemory(&bdP, sizeof(bdP));
bdP.Usage = D3D11_USAGE_DEFAULT;
bdP.ByteWidth = sizeof(PyramidVertex) * 5;
bdP.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bdP.CPUAccessFlags = 0;
D3D11_SUBRESOURCE_DATA InitPData;
ZeroMemory(&InitPData, sizeof(InitPData));
InitPData.pSysMem = Pyramid;
hr = g_pd3dDevice->CreateBuffer(&bdP, &InitPData, &g_pVertexBufferP);
if (FAILED(hr))
return hr;
// Set vertex buffer
UINT pStride = sizeof(PyramidVertex);
UINT pOffset = 1;
g_pImmediateContext->IASetVertexBuffers(0, 1, &g_pVertexBufferP, &pStride, &pOffset);
// create the index buffer
DWORD pIndex[] =
{
0, 2, 1,
1, 2, 3,
0, 1, 4,
1, 3, 4,
3, 2, 4,
2, 0, 4,
};
// create the index buffer
bdP.Usage = D3D11_USAGE_DYNAMIC;
bdP.ByteWidth = sizeof(DWORD) * 18;
bdP.BindFlags = D3D11_BIND_INDEX_BUFFER;
bdP.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
bdP.MiscFlags = 0;
InitPData.pSysMem = pIndex;
hr = g_pd3dDevice->CreateBuffer(&bdP, &InitPData, &g_pIndexBufferP);
if (FAILED(hr))
return hr;
// Set index buffer
g_pImmediateContext->IASetIndexBuffer(g_pIndexBufferP, DXGI_FORMAT_R16_UINT, 0);
// Set primitive topology
g_pImmediateContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
// Create vertex buffer
SimpleVertex vertices[] =
{
{ XMFLOAT3( -1.0f, 1.0f, -1.0f ), XMFLOAT4( 1.0f, 1.0f, 1.0f, 1.0f ) },
{ XMFLOAT3( 1.0f, 1.0f, -1.0f ), XMFLOAT4( 1.0f, 1.0f, 1.0f, 1.0f ) },
{ XMFLOAT3( 1.0f, 1.0f, 1.0f ), XMFLOAT4( 0.0f, 1.0f, 1.0f, 1.0f ) },
{ XMFLOAT3( -1.0f, 1.0f, 1.0f ), XMFLOAT4( .0f, 1.0f, 1.0f, 1.0f ) },
{ XMFLOAT3( -1.0f, -1.0f, -1.0f ), XMFLOAT4 (1.0f, 1.0f, 1.0f, 1.0f ) },
{ XMFLOAT3( 1.0f, -1.0f, -1.0f ), XMFLOAT4( 1.0f, 1.0f, 1.0f, 1.0f ) },
{ XMFLOAT3( 1.0f, -1.0f, 1.0f ), XMFLOAT4( 0.0f, 1.0f, 1.0f, 1.0f ) },
{ XMFLOAT3( -1.0f, -1.0f, 1.0f ), XMFLOAT4( .0f, 1.0f, 1.0f, 1.0f ) },
};
D3D11_BUFFER_DESC bd;
ZeroMemory( &bd, sizeof(bd) );
bd.Usage = D3D11_USAGE_DEFAULT;
bd.ByteWidth = sizeof( SimpleVertex ) * 8;
bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bd.CPUAccessFlags = 0;
D3D11_SUBRESOURCE_DATA InitData;
ZeroMemory( &InitData, sizeof(InitData) );
InitData.pSysMem = vertices;
hr = g_pd3dDevice->CreateBuffer( &bd, &InitData, &g_pVertexBuffer );
if( FAILED( hr ) )
return hr;
// Set vertex buffer
UINT stride = sizeof( SimpleVertex );
UINT offset = 0;
g_pImmediateContext->IASetVertexBuffers( 0, 1, &g_pVertexBuffer, &stride, &offset );
// Create index buffer
WORD indices[] =
{
3,1,0,
2,1,3,
0,5,4,
1,5,0,
3,4,7,
0,4,3,
1,6,5,
2,6,1,
2,7,6,
3,7,2,
6,4,5,
7,4,6,
};
bd.Usage = D3D11_USAGE_DEFAULT;
bd.ByteWidth = sizeof( WORD ) * 36; // 36 vertices needed for 12 triangles in a triangle list
bd.BindFlags = D3D11_BIND_INDEX_BUFFER;
bd.CPUAccessFlags = 0;
InitData.pSysMem = indices;
hr = g_pd3dDevice->CreateBuffer( &bd, &InitData, &g_pIndexBuffer );
if( FAILED( hr ) )
return hr;
// Set index buffer
g_pImmediateContext->IASetIndexBuffer( g_pIndexBuffer, DXGI_FORMAT_R16_UINT, 0 );
// Set primitive topology
g_pImmediateContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
// Create the constant buffer
bd.Usage = D3D11_USAGE_DEFAULT;
bd.ByteWidth = sizeof(ConstantBuffer);
bd.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
bd.CPUAccessFlags = 0;
hr = g_pd3dDevice->CreateBuffer( &bd, nullptr, &g_pConstantBuffer );
if( FAILED( hr ) )
return hr;
// Create the constant buffer
bdP.Usage = D3D11_USAGE_DEFAULT;
bdP.ByteWidth = sizeof(ConstantBuffer);
bdP.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
bdP.CPUAccessFlags = 0;
hr = g_pd3dDevice->CreateBuffer(&bdP, nullptr, &g_pConstantBufferP);
if (FAILED(hr))
return hr;
// Initialize the world matrix
g_Player = XMMatrixIdentity();
for (int i = 0; i < 10; ++i)
{
g_Shapes[i] = XMMatrixIdentity();
}
// Initialize the view matrix
XMVECTOR Eye = XMVectorSet( 0.0f, 1.0f, -5.0f, 0.0f );
XMVECTOR At = XMVectorSet( 0.0f, 1.0f, 0.0f, 0.0f );
XMVECTOR Up = XMVectorSet( 0.0f, 4.0f, 0.0f, 0.0f );
g_View = XMMatrixLookAtLH( Eye, At, Up );
// Initialize the projection matrix
g_Projection = XMMatrixPerspectiveFovLH( XM_PIDIV2, width / (FLOAT)height, 0.01f, 100.0f );
I believe that the issue is somewhere here, and my theory is that the pyramid state set on g_pImmediateContext is being overwritten when it comes to drawing the cubes. If this is the case then I have no clue how to solve, or research, this problem. It has taken me an hour to figure out how to put my code on this page properly, but I got weird results, so I will leave a link to my Google Drive containing this code if someone wants to have an in-depth look (for whatever reason) at the code.
This is the render function:
//
// Clear the back buffer
//
g_pImmediateContext->ClearRenderTargetView(g_pRenderTargetView, Colors::Black);
//
// Clear the depth buffer to 1.0 (max depth)
//
g_pImmediateContext->ClearDepthStencilView(g_pDepthStencilView, D3D11_CLEAR_DEPTH, 1.0f, 0);
XMMATRIX mRotate = XMMatrixRotationZ(DXGame->playerUser->getRotation());
XMMATRIX mTranslate = XMMatrixTranslation(DXGame->playerUser->getXpos(), DXGame->playerUser->getYpos(), DXGame->playerUser->getZpos());
XMMATRIX mScale = XMMatrixScaling(0.7f, 0.7f, 0.7f);
g_Player = mScale * mRotate * mTranslate;
ConstantBuffer cb1;
cb1.mWorld = XMMatrixTranspose(g_Player);
cb1.mView = XMMatrixTranspose(g_View);
cb1.mProjection = XMMatrixTranspose(g_Projection);
g_pImmediateContext->UpdateSubresource(g_pConstantBuffer, 0, nullptr, &cb1, 0, 0);
g_pImmediateContext->VSSetShader(g_pVertexShader, nullptr, 0);
g_pImmediateContext->VSSetConstantBuffers(0, 1, &g_pConstantBuffer);
g_pImmediateContext->PSSetShader(g_pPixelShader, nullptr, 0);
g_pImmediateContext->DrawIndexed(36, 0, 0);
for (int i = 0; i < 10; i++)
{
XMMATRIX sRotate = XMMatrixRotationY((DXGame->pickUps[i].rotation += 0.001f));
XMMATRIX sTranslate = XMMatrixTranslation(DXGame->pickUps[i].xPos, DXGame->pickUps[i].yPos, DXGame->pickUps[i].zPos);
XMMATRIX sScale = XMMatrixScaling(0.2f, 0.2f, 0.2f);
g_Shapes[i] = sScale * sRotate * sTranslate;
ConstantBuffer constB;
constB.mWorld = XMMatrixTranspose(g_Shapes[i]);
constB.mView = XMMatrixTranspose(g_View);
constB.mProjection = XMMatrixTranspose(g_Projection);
g_pImmediateContext->UpdateSubresource(g_pConstantBufferP, 0, nullptr, &constB, 0, 0);
g_pImmediateContext->VSSetShader(g_pVertexShader, nullptr, 0);
g_pImmediateContext->VSSetConstantBuffers(0, 1, &g_pConstantBufferP);
g_pImmediateContext->PSSetShader(g_pPixelShader, nullptr, 0);
g_pImmediateContext->DrawIndexed(18, 0, 0);
}
g_pSwapChain->Present(0, 0);
Something I am also looking at is constant buffers and HLSL to see if that is an issue as well.
Please could someone at least point me in the right direction, as this issue has bugged me for almost 2 months now (I left it this long because I wanted to figure it out for myself, but now I am desperate for a solution).
Thank you for taking the time to read this post; sorry it's so long, but I needed to get as much info out there as possible in the hope that it makes things easier to follow.
Your calls to IASetIndexBuffer and IASetVertexBuffers are in your creation routines. They need to be in your render function (before the relevant Draw call), as they are what attach those buffers to the pipeline for the draws that follow.
They do not need to be in the creation code at all: in DirectX 11 the context (which builds commands) and the device (which creates resources) are decoupled.
You should have, in the render loop:
// Set vertex buffer and index buffer for your cube
UINT stride = sizeof( SimpleVertex );
UINT offset = 0;
g_pImmediateContext->IASetVertexBuffers( 0, 1, &g_pVertexBuffer, &stride,
&offset );
// Set index buffer
g_pImmediateContext->IASetIndexBuffer( g_pIndexBuffer, DXGI_FORMAT_R16_UINT,
0 );
g_pImmediateContext->VSSetShader(g_pVertexShader, nullptr, 0);
g_pImmediateContext->VSSetConstantBuffers(0, 1, &g_pConstantBuffer);
g_pImmediateContext->PSSetShader(g_pPixelShader, nullptr, 0);
g_pImmediateContext->DrawIndexed(36, 0, 0);
and just before drawing all the pyramids (since you will draw them 10 times with the same geometry, you can bind once just before the loop):
// Set vertex buffer and index buffer for the pyramids
UINT pStride = sizeof( PyramidVertex );
UINT pOffset = 0;
g_pImmediateContext->IASetVertexBuffers( 0, 1, &g_pVertexBufferP, &pStride,
&pOffset );
// Set index buffer (pIndex is an array of DWORDs, so the format must be 32-bit)
g_pImmediateContext->IASetIndexBuffer(g_pIndexBufferP, DXGI_FORMAT_R32_UINT, 0);
for (int i = 0; i < 10; i++)
{
//Same draw code as before
}
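Putting it together, a minimal sketch of the reordered render function, using the names from the question (constant-buffer updates and shader binds abbreviated to comments, as they stay as in the existing code):
g_pImmediateContext->ClearRenderTargetView(g_pRenderTargetView, Colors::Black);
g_pImmediateContext->ClearDepthStencilView(g_pDepthStencilView, D3D11_CLEAR_DEPTH, 1.0f, 0);
// --- cube: bind its geometry, then draw ---
UINT stride = sizeof(SimpleVertex), offset = 0;
g_pImmediateContext->IASetVertexBuffers(0, 1, &g_pVertexBuffer, &stride, &offset);
g_pImmediateContext->IASetIndexBuffer(g_pIndexBuffer, DXGI_FORMAT_R16_UINT, 0);
// ... update g_pConstantBuffer and set shaders as before ...
g_pImmediateContext->DrawIndexed(36, 0, 0);
// --- pyramids: bind once, then draw 10 times ---
UINT pStride = sizeof(PyramidVertex), pOffset = 0;
g_pImmediateContext->IASetVertexBuffers(0, 1, &g_pVertexBufferP, &pStride, &pOffset);
g_pImmediateContext->IASetIndexBuffer(g_pIndexBufferP, DXGI_FORMAT_R32_UINT, 0); // DWORD indices
for (int i = 0; i < 10; i++)
{
    // ... update g_pConstantBufferP for pickup i and set shaders as before ...
    g_pImmediateContext->DrawIndexed(18, 0, 0);
}
g_pSwapChain->Present(0, 0);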

Passing normal data to shader

I have written some simple code to render some objects with DirectX 11.
The position is passed to the shader correctly. However, the normals seem to get lost somewhere. I changed the shader to show the normals' value as a color (just for debugging) and I get a black box (0,0,0 normals for every vertex?), in the right position:
Note that on the right bar I can see my NORMAL values (they are right!), but in the "Locals" only the position is set and the rest of the values are NaN. Why?
The shader:
... //some constants
struct VertexInputType
{
float4 position : POSITION;
float2 tex : TEXCOORD;
float3 normal : NORMAL;
float3 tangent : TANGENT;
//float3 binormal : BINORMAL;
};
struct VS_OUTPUT
{
float4 Pos : SV_POSITION;
float4 worldPos : POSITION;
float2 TexCoord : TEXCOORD;
float3 normal : NORMAL;
float3 tangent : TANGENT;
};
//VS_OUTPUT VS(float4 inPos : POSITION, float2 inTexCoord : TEXCOORD, float3 inNormal : NORMAL, float3 tangent : TANGENT)
VS_OUTPUT VS(VertexInputType input)
{
VS_OUTPUT output;
output.Pos = mul(input.position, WVP);
output.worldPos = mul(input.position, World);
output.normal = input.normal;
return output;
}
float4 PS(VS_OUTPUT input) : SV_TARGET
{
return float4(input.normal*100, 1);
}
technique10 RENDER
{
pass P0
{
SetVertexShader( CompileShader( vs_4_0, VS() ) );
// SetGeometryShader( CompileShader( gs_4_0, GS() ) );
SetPixelShader( CompileShader( ps_4_0, PS() ) );
SetBlendState( SrcAlphaBlendingAdd, float4( 0.0f, 0.0f, 0.0f, 0.0f ), 0xFFFFFFFF );
}
}
During rendering I use:
UINT stride = sizeof(Vertex);
UINT offset = 0;
context->IASetVertexBuffers(0, 1, &buffers->vertexBuffer, &stride, &offset); //set vertex buffer
context->IASetIndexBuffer(buffers->indexBuffer, DXGI_FORMAT_R16_UINT, 0); //set index buffer
for(int i=0; i<structure.subsets.size(); i++){
//set matrices
DirectX::XMFLOAT4X4 view = camera->getView();
DirectX::XMMATRIX camView = XMLoadFloat4x4(&view);
DirectX::XMFLOAT4X4 projection = camera->getProjection();
DirectX::XMMATRIX camProjection = XMLoadFloat4x4(&projection);
DirectX::XMMATRIX worldViewProjectionMatrix = objectWorldMatrix * camView * camProjection;
//set the constants per object
ConstantBufferStructure constantsPerObject;
constantsPerObject.worldViewProjection = XMMatrixTranspose(worldViewProjectionMatrix);
constantsPerObject.world = XMMatrixTranspose(objectWorldMatrix);
//bind constants per object to constant buffer and send it to vertex and pixel shaders
context->UpdateSubresource(constantBuffer, 0, NULL, &constantsPerObject, 0, 0);
context->VSSetConstantBuffers(0, 1, &constantBuffer);
context->PSSetConstantBuffers(0, 1, &constantBuffer);
//context->PSSetSamplers(0,1,&m_sampleState);
context->RSSetState(RSCullDefault);
int start = structure.subsets[i]->getVertexIndexStart();
int count = structure.subsets[i]->getVertexIndexAmmount();
context->DrawIndexed(count, start, 0);
}
And for the shader initialization:
// Create the vertex shader
hr = device->CreateVertexShader( pVSBlob->GetBufferPointer(), pVSBlob->GetBufferSize(), NULL, &vertexShader );
//create the input layout
VertexLayoutDescirption layoutDescription; //gives us the layout data corresponding to the Vertex structure
hr = device->CreateInputLayout(layoutDescription.layout, layoutDescription.entriesCount, pVSBlob->GetBufferPointer(), pVSBlob->GetBufferSize(), &*vertexLayout );
pVSBlob->Release();
context->IASetInputLayout( *vertexLayout );
//compile the pixel shader
ID3DBlob* pPSBlob = NULL;
CompileShaderFromFile( C::toWChar(C::toString(pixelShaderFileName)), "PS", "ps_4_0", &pPSBlob );
// Create the pixel shader
hr = device->CreatePixelShader( pPSBlob->GetBufferPointer(), pPSBlob->GetBufferSize(), NULL, &pixelShader );
Where:
struct Vertex{//vertex structure
Vertex() : weightCount(0){}
Vertex(float x, float y, float z, float u, float v, float nx, float ny, float nz, float tx, float ty, float tz)
: position(x, y, z), textureCoordinates(u, v), normals(nx, ny, nz), tangents(tx, ty, tz), weightCount(0){}
Vertex(DirectX::XMFLOAT3 position, DirectX::XMFLOAT2 textureCoordinates, DirectX::XMFLOAT3 normals, DirectX::XMFLOAT3 biTangents)
: position(position), textureCoordinates(textureCoordinates), normals(normals), tangents(tangents), weightCount(0){}
DirectX::XMFLOAT3 position;
DirectX::XMFLOAT2 textureCoordinates;
DirectX::XMFLOAT3 normals;
DirectX::XMFLOAT3 tangents;
DirectX::XMFLOAT3 biTangents;
//will not be sent to shader (and used only by skinned models)
int startWeightIndex; //index in Subset::weights (from 0 to X for each subset separately)
int weightCount; //=0 means that it's not skinned vertex
};
/* will be used by the Shader; should correspond to Vertex (the data that we want to transfer to the shader) */
struct VertexLayoutDescirption{
D3D11_INPUT_ELEMENT_DESC layout[4]; //the input layout
UINT entriesCount; //the number of elements of layout[], will also be used by Shader
VertexLayoutDescirption(){
entriesCount = 4;
for(UINT i=0; i<entriesCount; i++){
layout[i].SemanticIndex = 0;
layout[i].Format = DXGI_FORMAT_R32G32B32_FLOAT;
layout[i].InputSlot = 0;
layout[i].InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
layout[i].InstanceDataStepRate = 0;
}
layout[0].SemanticName ="POSITION";
layout[0].AlignedByteOffset = 0; //(not D3D11_APPEND_ALIGNED_ELEMENT)
layout[1].SemanticName ="TEXCOORD";
layout[1].AlignedByteOffset = 12; //or D3D11_APPEND_ALIGNED_ELEMENT
layout[2].SemanticName ="NORMAL";
layout[2].AlignedByteOffset = 20; //or D3D11_APPEND_ALIGNED_ELEMENT
layout[3].SemanticName ="TANGENT";
layout[3].AlignedByteOffset = 32; //or D3D11_APPEND_ALIGNED_ELEMENT
}
};
The box model:
/*top vertices*/
structure.vertices[0] = Vertex(/*pos*/ -1.0f, +1.0f, -1.0f, /*uv*/ 1.0f, 1.0f, /*normals*/ 0.0f, 1.0f, -1.0f, /*tan*/ +1.0f, -1.0f, 1.0f);
structure.vertices[1] = Vertex(/*pos*/ +1.0f, +1.0f, -1.0f, /*uv*/ 0.0f, 1.0f, /*normals*/ 0.0f, 1.0f, +1.0f, /*tan*/ +1.0f, -1.0f, 1.0f);
structure.vertices[2] = Vertex(/*pos*/ +1.0f, +1.0f, +1.0f, /*uv*/ 1.0f, 0.0f, /*normals*/ 0.0f, 1.0f, +1.0f, /*tan*/ +1.0f, +1.0f, 1.0f);
structure.vertices[3] = Vertex(/*pos*/ -1.0f, +1.0f, +1.0f, /*uv*/ 0.0f, 0.0f, /*normals*/ 0.0f, 1.0f, -1.0f, /*tan*/ +1.0f, +1.0f, 1.0f);
/*bottom vertices*/
structure.vertices[4] = Vertex(/*pos*/ -1.0f, -1.0f, -1.0f, /*uv*/ 1.0f, 0.0f, /*normals*/ 0.0f, 1.0f, -1.0f, /*tan*/ -1.0f, -1.0f, 1.0f);
structure.vertices[5] = Vertex(/*pos*/ +1.0f, -1.0f, -1.0f, /*uv*/ 0.0f, 0.0f, /*normals*/ 0.0f, 1.0f, +1.0f, /*tan*/ -1.0f, -1.0f, 1.0f);
structure.vertices[6] = Vertex(/*pos*/ +1.0f, -1.0f, +1.0f, /*uv*/ 1.0f, 1.0f, /*normals*/ 0.0f, 1.0f, +1.0f, /*tan*/ -1.0f, +1.0f, 1.0f);
structure.vertices[7] = Vertex(/*pos*/ -1.0f, -1.0f, +1.0f, /*uv*/ 0.0f, 1.0f, /*normals*/ 0.0f, 1.0f, -1.0f, /*tan*/ -1.0f, +1.0f, 1.0f);
buffers = new Buffers();
D3D11_BUFFER_DESC bd;
ZeroMemory(&bd, sizeof(bd));
bd.Usage = D3D11_USAGE_DEFAULT; //D3D11_USAGE_DYNAMIC
bd.ByteWidth = sizeof(Vertex) * structure.getVerticesCount();
bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bd.CPUAccessFlags = 0;
D3D11_SUBRESOURCE_DATA InitData;
ZeroMemory(&InitData, sizeof(InitData));
InitData.pSysMem = structure.vertices;
if(device->CreateBuffer(&bd, &InitData, &buffers->vertexBuffer) != S_OK){
return false;
}
... //index buffer
Why are the normals not being passed to the shader while the position is? What did I miss?
In the shader file, try using float3 normal : TEXCOORD1; or float3 normal : TEXCOORD2; (any TEXCOORD semantic with a free index) instead of float3 normal : NORMAL; in the VS_OUTPUT structure.
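A minimal sketch of that change, assuming the rest of the shader stays the same (the input struct keeps its NORMAL semantic; only the VS output switches to TEXCOORD slots). Note also that the VS shown never writes TexCoord or tangent, so it is worth writing every member of VS_OUTPUT while you are at it:
struct VS_OUTPUT
{
    float4 Pos      : SV_POSITION;
    float4 worldPos : POSITION;
    float2 TexCoord : TEXCOORD0;
    float3 normal   : TEXCOORD1;   // was : NORMAL
    float3 tangent  : TEXCOORD2;   // was : TANGENT
};
VS_OUTPUT VS(VertexInputType input)
{
    VS_OUTPUT output;
    output.Pos = mul(input.position, WVP);
    output.worldPos = mul(input.position, World);
    output.TexCoord = input.tex;       // previously never assigned
    output.normal = input.normal;
    output.tangent = input.tangent;    // previously never assigned
    return output;
}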

DirectX11 Shader Compilation Issue

I'm working on a simple DirectX application to display a couple of triangles together as a tetrahedron, which keeps crashing at start. I checked with the VS2012 debugger: the error occurs at the stage where the shader is supposed to be compiled from a .fx file, so I assume it's got something to do with the shader. I have no idea what I did wrong. Below is the code of the shader I'm using. Assistance required.
struct Light
{
float3 pos;
float4 ambient;
float4 diffuse;
};
cbuffer cbPerFrame
{
Light light;
};
cbuffer cbPerObject
{
float4x4 WVP;
float4x4 World;
};
struct VS_OUTPUT
{
float4 Pos : SV_POSITION;
float4 worldPos : POSITION;
float4 color : COLOR;
float3 normal : NORMAL;
};
VS_OUTPUT VS(float4 inPos : POSITION, float4 inColor : COLOR, float3 normal : NORMAL)
{
VS_OUTPUT output;
output.Pos = mul(inPos, WVP);
output.worldPos = mul(inPos, World);
output.normal = mul(normal, World);
output.color = inColor;
return output;
}
float4 PS(VS_OUTPUT input) : SV_TARGET
{
input.normal = normalize(input.normal);
float4 diffuse = input.color;
float3 finalColor = float3(0.0f, 0.0f, 0.0f);
//Create the vector between light position and pixels position
float3 lightToPixelVec = light.pos - input.worldPos;
//Add the ambient light
float3 finalAmbient = diffuse * light.ambient;
//Turn lightToPixelVec into a unit length vector describing
//the pixels direction from the lights position
lightToPixelVec /= d;
//Calculate how much light the pixel gets by the angle
//in which the light strikes the pixels surface
float howMuchLight = dot(lightToPixelVec, input.normal);
//If light is striking the front side of the pixel
if( howMuchLight > 0.0f )
{
//Add light to the finalColor of the pixel
finalColor += diffuse * light.diffuse;
}
//make sure the values are between 1 and 0, and add the ambient
finalColor = saturate(finalColor + finalAmbient);
//Return Final Color
return float4(finalColor, diffuse.a);
}
Here's the part where the compilation is supposed to happen:
bool InitScene()
{
//Compile Shaders from shader file
hr = D3DX11CompileFromFile(L"Effects.fx", 0, 0, "VS", "vs_4_0", 0, 0, 0,
&VS_Buffer, 0, 0);
if(FAILED(hr))
{
MessageBox(0, L"Shader Compilation - Failed",
L"Error", MB_OK);
return false;
}
hr = D3DX11CompileFromFile(L"Effects.fx", 0, 0, "PS", "ps_4_0", 0, 0, 0,
&PS_Buffer, 0, 0);
//Create the Shader Objects
hr = d3d11Device->CreateVertexShader(VS_Buffer->GetBufferPointer(),
VS_Buffer->GetBufferSize(), NULL, &VS);
hr = d3d11Device->CreatePixelShader(PS_Buffer->GetBufferPointer(),
PS_Buffer->GetBufferSize(), NULL, &PS);
//Set Vertex and Pixel Shaders
d3d11DevCon->VSSetShader(VS, 0, 0);
d3d11DevCon->PSSetShader(PS, 0, 0);
light.pos = XMFLOAT3(0.25f, 0.5f, -1.0f);
light.ambient = XMFLOAT4(0.2f, 0.2f, 0.2f, 1.0f);
light.diffuse = XMFLOAT4(1.0f, 1.0f, 1.0f, 1.0f);
//X,Y,Z,R,G,B,A,NX,NY,NZ
//Create the vertex buffer
Vertex v[] =
{
Vertex( 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f),
Vertex( -0.5f, -0.5f, 0.5f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f),
Vertex( 0.5f, -0.5f, 0.5f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f),
Vertex( 0.0f, -0.5f, 0.5f, 1.0f, 0.0f, 0.0f, 1.0f,0.0f, 1.0f, 0.0f)
};
DWORD indices[] = {
//Front
0, 1, 2,
//Left
0, 1, 3,
//Right
0, 2, 3,
//Bottom
1, 2, 3
};
D3D11_BUFFER_DESC indexBufferDesc;
ZeroMemory( &indexBufferDesc, sizeof(indexBufferDesc) );
indexBufferDesc.Usage = D3D11_USAGE_DEFAULT;
indexBufferDesc.ByteWidth = sizeof(DWORD) * 4 * 3;
indexBufferDesc.BindFlags = D3D11_BIND_INDEX_BUFFER;
indexBufferDesc.CPUAccessFlags = 0;
indexBufferDesc.MiscFlags = 0;
D3D11_SUBRESOURCE_DATA iinitData;
iinitData.pSysMem = indices;
d3d11Device->CreateBuffer(&indexBufferDesc, &iinitData, &IndexBuffer);
//d3d11DevCon->IASetIndexBuffer( squareIndexBuffer, DXGI_FORMAT_R32_UINT, 0);
D3D11_BUFFER_DESC vertexBufferDesc;
ZeroMemory( &vertexBufferDesc, sizeof(vertexBufferDesc) );
vertexBufferDesc.Usage = D3D11_USAGE_DEFAULT;
vertexBufferDesc.ByteWidth = sizeof( Vertex ) * 4;
vertexBufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
vertexBufferDesc.CPUAccessFlags = 0;
vertexBufferDesc.MiscFlags = 0;
D3D11_SUBRESOURCE_DATA vertexBufferData;
ZeroMemory( &vertexBufferData, sizeof(vertexBufferData) );
vertexBufferData.pSysMem = v;
hr = d3d11Device->CreateBuffer( &vertexBufferDesc, &vertexBufferData, &VertBuffer);
//Set the vertex buffer
UINT stride = sizeof( Vertex );
UINT offset = 0;
//d3d11DevCon->IASetVertexBuffers( 0, 1, &squareVertBuffer, &stride, &offset );
//Create the Input Layout
hr = d3d11Device->CreateInputLayout( layout,
numElements,VS_Buffer->GetBufferPointer(),
VS_Buffer->GetBufferSize(), &vertLayout );
//Set the Input Layout
d3d11DevCon->IASetInputLayout( vertLayout );
//Set Primitive Topology
d3d11DevCon->IASetPrimitiveTopology( D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST );
//Create the Viewport
D3D11_VIEWPORT viewport;
ZeroMemory(&viewport, sizeof(D3D11_VIEWPORT));
viewport.TopLeftX = 0;
viewport.TopLeftY = 0;
viewport.Width = width;
viewport.Height = height;
viewport.MinDepth = 0.0f;
viewport.MaxDepth = 2.0f;
//Set the Viewport
d3d11DevCon->RSSetViewports(1, &viewport);
//Create the buffer to send to the cbuffer in effect file
D3D11_BUFFER_DESC cbbd;
ZeroMemory(&cbbd, sizeof(D3D11_BUFFER_DESC));
cbbd.Usage = D3D11_USAGE_DEFAULT;
cbbd.ByteWidth = sizeof(cbPerObject);
cbbd.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
cbbd.CPUAccessFlags = 0;
cbbd.MiscFlags = 0;
hr = d3d11Device->CreateBuffer(&cbbd, NULL, &cbPerObjectBuffer);
ZeroMemory(&cbbd, sizeof(D3D11_BUFFER_DESC));
cbbd.Usage = D3D11_USAGE_DEFAULT;
cbbd.ByteWidth = sizeof(cbPerFrame);
cbbd.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
cbbd.CPUAccessFlags = 0;
cbbd.MiscFlags = 0;
hr = d3d11Device->CreateBuffer(&cbbd, NULL, &cbPerFrameBuffer);
//Camera information
camPosition = XMVectorSet( -5.0f, 5.0f, 8.0f, 0.0f );
camTarget = XMVectorSet( 0.0f, 0.0f, 0.0f, 0.0f );
camUp = XMVectorSet( 0.0f, 1.0f, 0.0f, 0.0f );
//Set the View matrix
camView = XMMatrixLookAtLH( camPosition, camTarget, camUp );
//Set the Projection matrix
camProjection = XMMatrixPerspectiveFovLH( 0.4f*3.14f, width/height, 1.0f, 1000.0f);
return true;
}
Your Vertex shader compiles, but your Pixel Shader doesn't:
lightToPixelVec /= d;
d is undefined
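Presumably d was meant to be the distance from the light to the pixel, computed before the division; a sketch of the likely missing line (an assumption, based on how this common lighting snippet usually reads):
float3 lightToPixelVec = light.pos - input.worldPos;
float d = length(lightToPixelVec);   // missing definition: distance from light to pixel
lightToPixelVec /= d;                // now a unit-length direction vector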
Since your code only checks the VS compilation result, it makes sense that it crashes when trying to create the Pixel Shader (you hand it an invalid pointer).
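For completeness, the PS compile call could get the same guard the question already applies to the VS (mirroring the existing check):
hr = D3DX11CompileFromFile(L"Effects.fx", 0, 0, "PS", "ps_4_0", 0, 0, 0,
&PS_Buffer, 0, 0);
if(FAILED(hr))
{
MessageBox(0, L"Pixel Shader Compilation - Failed",
L"Error", MB_OK);
return false;
}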
As mentioned in the comments, it's also important to check the feature level; if you develop for desktop/laptop, pretty much any device should be at least feature level 10.1.
For phones, you should use one of these profiles (whichever matches best):
ps_4_0_level_9_1, ps_4_0_level_9_2, ps_4_0_level_9_3

D3D11_BUFFER_DESC bytewidth "not working"

I'm having some issues with Direct3D. Namely the vertex buffer and its ByteWidth member.
I want to draw two quads, so I create my vertex buffer like so:
struct Vertex
{
XMFLOAT3 pos;
XMFLOAT3 normal;
XMFLOAT2 texCoord;
};
....
void GameWindow::CreateVerticesAndBuffer()
{
Vertex vertices[] =
{
{ XMFLOAT3(-1.0f, -1.0f, -1.0f), XMFLOAT3(0.0f, 0.0f, -1.0f), XMFLOAT2(0.0f, 1.0f) },
{ XMFLOAT3(-1.0f, 1.0f, -1.0f), XMFLOAT3(0.0f, 0.0f, -1.0f), XMFLOAT2(0.0f, 0.0f) },
{ XMFLOAT3(1.0f, 1.0f, -1.0f), XMFLOAT3(0.0f, 0.0f, -1.0f), XMFLOAT2(1.0f, 0.0f) },
{ XMFLOAT3(1.0f, -1.0f, -1.0f), XMFLOAT3(0.0f, 0.0f, -1.0f), XMFLOAT2(1.0f, 1.0f) },
{ XMFLOAT3(-1.0f, -1.0f, 1.0f), XMFLOAT3(0.0f, 0.0f, -1.0f), XMFLOAT2(0.0f, 1.0f) },
{ XMFLOAT3(-1.0f, 1.0f, 1.0f), XMFLOAT3(0.0f, 0.0f, -1.0f), XMFLOAT2(0.0f, 0.0f) },
{ XMFLOAT3(1.0f, 1.0f, 1.0f), XMFLOAT3(0.0f, 0.0f, -1.0f), XMFLOAT2(1.0f, 0.0f) },
{ XMFLOAT3(1.0f, -1.0f, 1.0f), XMFLOAT3(0.0f, 0.0f, -1.0f), XMFLOAT2(1.0f, 1.0f) }
};
D3D11_BUFFER_DESC desc;
ZeroMemory(&desc, sizeof(desc));
desc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
desc.CPUAccessFlags = 0;
desc.Usage = D3D11_USAGE_DEFAULT; //Will not ever change after creation (??)
desc.MiscFlags = 0;
desc.ByteWidth = sizeof(Vertex) * 8;
desc.StructureByteStride = 0;
D3D11_SUBRESOURCE_DATA data;
ZeroMemory(&data, sizeof(data));
data.pSysMem = vertices;
HR(device->CreateBuffer(
&desc,
&data,
&vertexBuffer));
UINT stride = sizeof(Vertex);
UINT offset = 0;
deviceContext->IASetVertexBuffers(0, 1, &vertexBuffer, &stride, &offset);
}
This code produces some weird results as seen here. The back face is mirrored for some reason.
But, if I change
desc.ByteWidth = sizeof(Vertex) * 8
to
desc.ByteWidth = sizeof(Vertex) * 9
It is drawn correctly.
Does anyone have any idea why this happens?
EDIT: Here is my CreateInputLayout:
D3D11_INPUT_ELEMENT_DESC inputDesc[] = {
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEX", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0 }
};
hRes = device->CreateInputLayout(
inputDesc,
ARRAYSIZE(inputDesc),
vertexShaderSource->GetBufferPointer(),
vertexShaderSource->GetBufferSize(),
&vertexInputLayout);
You have specified DXGI_FORMAT_R32G32B32_FLOAT (float3) for your TEX member. Change it to DXGI_FORMAT_R32G32_FLOAT and it should work. As declared, each TEX fetch reads 12 bytes starting at offset 24, i.e. 4 bytes beyond the 32-byte vertex; for the last vertex that read runs past the end of the buffer, which is most likely why padding the buffer to 9 vertices happened to "work".
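The corrected layout would look like this (only the TEX format changes; the offsets already match the Vertex struct):
D3D11_INPUT_ELEMENT_DESC inputDesc[] = {
    { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0,  0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "NORMAL",   0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "TEX",      0, DXGI_FORMAT_R32G32_FLOAT,    0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0 }  // float2, matches the XMFLOAT2 texCoord
};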

DirectX : Nothing is drawn on screen

I'm trying to develop a program using DirectX (10) to display things on screen.
Thing is, it displays nothing but the color I use to clear the backbuffer.
(I apologize for the quite big chunks of code that follow).
Here is my rendering function :
void DXEngine::renderOneFrame()
{
//First, we clear the back buffer
m_device->ClearRenderTargetView(m_renderTargetView,D3DXCOLOR(0.0f, 0.125f, 0.3f, 1.0f));
//Then, we clear the depth buffer
m_device->ClearDepthStencilView(m_depthStencilView,D3D10_CLEAR_DEPTH,1.0f, 0);
//Update variables
m_worldVariable->SetMatrix((float*)&m_world);
m_viewVariable->SetMatrix((float*)&m_view);
m_projectionVariable->SetMatrix((float*)&m_projection);
//Render the cube
D3D10_TECHNIQUE_DESC techDesc;
m_technique->GetDesc(&techDesc);
for(UINT pass = 0; pass < techDesc.Passes ; pass++){
m_technique->GetPassByIndex(pass)->Apply(0);
m_device->DrawIndexed(36,0,0);
}
m_swapChain->Present(0,0);
}
It is exactly the same as the 5th tutorial on DirectX10 in the DirectX SDK (June 2010) under the "Samples" folder, except it's encapsulated in an object.
My scene is initialized as follow :
HRESULT DXEngine::initStaticScene()
{
HRESULT hr;
//Vertex buffer creation and initialization
Vertex1Pos1Col vertices [] =
{
{ D3DXVECTOR3( -1.0f, 1.0f, -1.0f ), D3DXVECTOR4( 0.0f, 0.0f, 1.0f, 1.0f ) },
{ D3DXVECTOR3( 1.0f, 1.0f, -1.0f ), D3DXVECTOR4( 0.0f, 1.0f, 0.0f, 1.0f ) },
{ D3DXVECTOR3( 1.0f, 1.0f, 1.0f ), D3DXVECTOR4( 0.0f, 1.0f, 1.0f, 1.0f ) },
{ D3DXVECTOR3( -1.0f, 1.0f, 1.0f ), D3DXVECTOR4( 1.0f, 0.0f, 0.0f, 1.0f ) },
{ D3DXVECTOR3( -1.0f, -1.0f, -1.0f ), D3DXVECTOR4( 1.0f, 0.0f, 1.0f, 1.0f ) },
{ D3DXVECTOR3( 1.0f, -1.0f, -1.0f ), D3DXVECTOR4( 1.0f, 1.0f, 0.0f, 1.0f ) },
{ D3DXVECTOR3( 1.0f, -1.0f, 1.0f ), D3DXVECTOR4( 1.0f, 1.0f, 1.0f, 1.0f ) },
{ D3DXVECTOR3( -1.0f, -1.0f, 1.0f ), D3DXVECTOR4( 0.0f, 0.0f, 0.0f, 1.0f ) },
};
D3D10_BUFFER_DESC desc;
desc.Usage = D3D10_USAGE_DEFAULT;
desc.ByteWidth = sizeof(Vertex1Pos1Col) * 8;
desc.BindFlags = D3D10_BIND_VERTEX_BUFFER;
desc.CPUAccessFlags = 0;
desc.MiscFlags = 0;
D3D10_SUBRESOURCE_DATA data;
data.pSysMem = vertices;
hr = m_device->CreateBuffer(&desc,&data,&m_vertexBuffer);
if(FAILED(hr)){
MessageBox(NULL,TEXT("Vertex buffer creation failed"), TEXT("Error"),MB_OK);
return hr;
}
UINT stride = sizeof(Vertex1Pos1Col);
UINT offset = 0;
m_device->IASetVertexBuffers(0,1,&m_vertexBuffer,&stride,&offset);
//Index buffer creation and initialization
DWORD indices[] =
{
3,1,0,
2,1,3,
0,5,4,
1,5,0,
3,4,7,
0,4,3,
1,6,5,
2,6,1,
2,7,6,
3,7,2,
6,4,5,
7,4,6,
};
desc.Usage = D3D10_USAGE_DEFAULT;
desc.ByteWidth = sizeof(DWORD) * 36;
desc.BindFlags = D3D10_BIND_INDEX_BUFFER;
desc.CPUAccessFlags = 0;
desc.MiscFlags = 0;
data.pSysMem = vertices;
hr = m_device->CreateBuffer(&desc,&data,&m_indexBuffer);
if(FAILED(hr)){
MessageBox(NULL,TEXT("Index buffer creation failed"), TEXT("Error"),MB_OK);
return hr;
}
m_device->IASetIndexBuffer(m_indexBuffer,DXGI_FORMAT_R32_FLOAT,0);
//Set the primitive topology, i.e. how indices should be interpreted (here, as a triangle list)
m_device->IASetPrimitiveTopology(D3D10_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
D3DXMatrixIdentity(&m_world);
D3DXVECTOR3 eye(0.0f, 1.0f, -10.0f);
D3DXVECTOR3 at(0.0f, 1.0f, 0.0f);
D3DXVECTOR3 up(0.0f, 1.0f, 0.0f);
D3DXMatrixLookAtLH(&m_view, &eye, &at, &up);
D3DXMatrixPerspectiveFovLH(&m_projection, (float)D3DX_PI * 0.25f, m_width/(FLOAT)m_height, 0.1f, 100.0f);
return hr;
}
Once again, it's the exact same code (but encapsulated) as the tutorial I mentioned earlier.
When I open the Tutorial Visual Studio Solution in my IDE, it works and displays nicely what is described in the scene, but when I try to run my "encapsulated" version of this code, nothing shows up but the background color...
Note: my window's message pump works fine, I can even handle user input the way I want; everything's fine. My application performs my engine initialization correctly (I check every single returned error code and there's nothing but S_OK codes).
I have no clue where to search now. I've checked my code time and time again and it's exactly the same as the tutorial, I've checked that everything I encapsulate is set and accessed correctly, etc., but I still can't display anything other than the background color...
I was wondering if anyone here could have an idea of what could possibly cause this, or at least a hint on where to look...
EDIT: Effect file used :
//--------------------------------------------------------------------------------------
// File: Tutorial05.fx
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//--------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------
// Constant Buffer Variables
//--------------------------------------------------------------------------------------
matrix World;
matrix View;
matrix Projection;
//--------------------------------------------------------------------------------------
struct VS_INPUT
{
float4 Pos : POSITION;
float4 Color : COLOR;
};
struct PS_INPUT
{
float4 Pos : SV_POSITION;
float4 Color : COLOR;
};
//--------------------------------------------------------------------------------------
// Vertex Shader
//--------------------------------------------------------------------------------------
PS_INPUT VS( VS_INPUT input )
{
PS_INPUT output = (PS_INPUT)0;
output.Pos = mul( input.Pos, World );
output.Pos = mul( output.Pos, View );
output.Pos = mul( output.Pos, Projection );
output.Color = input.Color;
return output;
}
//--------------------------------------------------------------------------------------
// Pixel Shader
//--------------------------------------------------------------------------------------
float4 PS( PS_INPUT input) : SV_Target
{
return input.Color;
}
//--------------------------------------------------------------------------------------
technique10 Render
{
pass P0
{
SetVertexShader( CompileShader( vs_4_0, VS() ) );
SetGeometryShader( NULL );
SetPixelShader( CompileShader( ps_4_0, PS() ) );
}
}
I think this can be the error:
The input assembler stage of the D3D (10 and 11) pipeline always expects a DXGI_FORMAT_*_UINT format for index buffers. MSDN confirms this:
A DXGI_FORMAT that specifies the format of the data in the index buffer. The only formats allowed for index buffer data are 16-bit (DXGI_FORMAT_R16_UINT) and 32-bit (DXGI_FORMAT_R32_UINT) integers.
Then look at your code that binds your buffer to IA:
m_device->IASetIndexBuffer(m_indexBuffer, DXGI_FORMAT_R32_FLOAT, 0);
I think you should use DXGI_FORMAT_R32_UINT for your case, like this:
m_device->IASetIndexBuffer(m_indexBuffer, DXGI_FORMAT_R32_UINT, 0);