DirectX : Nothing is drawn on screen - c++

I'm trying to develop a program using DirectX (10) to display on screen.
Thing is, it displays nothing but the color I use to clear the backbuffer.
(I apologize for the quite big chunks of code that follow).
Here is my rendering function :
// Renders one frame: clears the render target and depth buffer, uploads
// the world/view/projection matrices to the effect, draws the 36-index
// cube once per technique pass, then presents the back buffer.
// NOTE(review): assumes the vertex/index buffers, input layout and
// viewport were bound during initialization -- D3D10 keeps that state
// between frames only if nothing else rebinds it.
void DXEngine::renderOneFrame()
{
//First, we clear the back buffer
m_device->ClearRenderTargetView(m_renderTargetView,D3DXCOLOR(0.0f, 0.125f, 0.3f, 1.0f));
//Then, we clear the depth buffer
m_device->ClearDepthStencilView(m_depthStencilView,D3D10_CLEAR_DEPTH,1.0f, 0);
//Update variables
m_worldVariable->SetMatrix((float*)&m_world);
m_viewVariable->SetMatrix((float*)&m_view);
m_projectionVariable->SetMatrix((float*)&m_projection);
//Render the cube
D3D10_TECHNIQUE_DESC techDesc;
m_technique->GetDesc(&techDesc);
// Apply() binds the pass state before each draw of the 36 cube indices.
for(UINT pass = 0; pass < techDesc.Passes ; pass++){
m_technique->GetPassByIndex(pass)->Apply(0);
m_device->DrawIndexed(36,0,0);
}
// Present(0,0): no vsync wait, no presentation flags.
m_swapChain->Present(0,0);
}
It is exactly the same as the 5th tutorial on DirectX10 in the DirectX SDK (June 2010) under the "Samples" folder, except it's encapsulated in an object.
My scene is initialized as follow :
// Creates and binds the static cube scene: an 8-vertex vertex buffer, a
// 36-index index buffer, the triangle-list topology and the
// world/view/projection matrices.
// Returns S_OK on success; on buffer-creation failure, reports via a
// message box and returns the failing HRESULT.
HRESULT DXEngine::initStaticScene()
{
HRESULT hr;
//Vertex buffer creation and initialization
Vertex1Pos1Col vertices [] =
{
{ D3DXVECTOR3( -1.0f, 1.0f, -1.0f ), D3DXVECTOR4( 0.0f, 0.0f, 1.0f, 1.0f ) },
{ D3DXVECTOR3( 1.0f, 1.0f, -1.0f ), D3DXVECTOR4( 0.0f, 1.0f, 0.0f, 1.0f ) },
{ D3DXVECTOR3( 1.0f, 1.0f, 1.0f ), D3DXVECTOR4( 0.0f, 1.0f, 1.0f, 1.0f ) },
{ D3DXVECTOR3( -1.0f, 1.0f, 1.0f ), D3DXVECTOR4( 1.0f, 0.0f, 0.0f, 1.0f ) },
{ D3DXVECTOR3( -1.0f, -1.0f, -1.0f ), D3DXVECTOR4( 1.0f, 0.0f, 1.0f, 1.0f ) },
{ D3DXVECTOR3( 1.0f, -1.0f, -1.0f ), D3DXVECTOR4( 1.0f, 1.0f, 0.0f, 1.0f ) },
{ D3DXVECTOR3( 1.0f, -1.0f, 1.0f ), D3DXVECTOR4( 1.0f, 1.0f, 1.0f, 1.0f ) },
{ D3DXVECTOR3( -1.0f, -1.0f, 1.0f ), D3DXVECTOR4( 0.0f, 0.0f, 0.0f, 1.0f ) },
};
D3D10_BUFFER_DESC desc;
desc.Usage = D3D10_USAGE_DEFAULT;
desc.ByteWidth = sizeof(Vertex1Pos1Col) * 8;
desc.BindFlags = D3D10_BIND_VERTEX_BUFFER;
desc.CPUAccessFlags = 0;
desc.MiscFlags = 0;
D3D10_SUBRESOURCE_DATA data;
data.pSysMem = vertices;
hr = m_device->CreateBuffer(&desc,&data,&m_vertexBuffer);
if(FAILED(hr)){
MessageBox(NULL,TEXT("Vertex buffer creation failed"), TEXT("Error"),MB_OK);
return hr;
}
UINT stride = sizeof(Vertex1Pos1Col);
UINT offset = 0;
m_device->IASetVertexBuffers(0,1,&m_vertexBuffer,&stride,&offset);
//Index buffer creation and initialization
DWORD indices[] =
{
3,1,0,
2,1,3,
0,5,4,
1,5,0,
3,4,7,
0,4,3,
1,6,5,
2,6,1,
2,7,6,
3,7,2,
6,4,5,
7,4,6,
};
desc.Usage = D3D10_USAGE_DEFAULT;
desc.ByteWidth = sizeof(DWORD) * 36;
desc.BindFlags = D3D10_BIND_INDEX_BUFFER;
desc.CPUAccessFlags = 0;
desc.MiscFlags = 0;
//BUGFIX: the index buffer must be initialized from 'indices', not from
//'vertices' -- the old code uploaded vertex data as index data, so
//DrawIndexed fetched garbage indices and nothing recognizable was drawn.
data.pSysMem = indices;
hr = m_device->CreateBuffer(&desc,&data,&m_indexBuffer);
if(FAILED(hr)){
MessageBox(NULL,TEXT("Index buffer creation failed"), TEXT("Error"),MB_OK);
return hr;
}
//BUGFIX: index buffers only accept DXGI_FORMAT_R16_UINT or
//DXGI_FORMAT_R32_UINT; DXGI_FORMAT_R32_FLOAT is invalid here and makes
//the Input Assembler misread the DWORD indices.
m_device->IASetIndexBuffer(m_indexBuffer,DXGI_FORMAT_R32_UINT,0);
//Set the primitive topology, i.e. how indices should be interpreted (here, as a triangle list)
m_device->IASetPrimitiveTopology(D3D10_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
D3DXMatrixIdentity(&m_world);
//Camera at (0,1,-10) looking toward (0,1,0), Y-up.
D3DXVECTOR3 eye(0.0f, 1.0f, -10.0f);
D3DXVECTOR3 at(0.0f, 1.0f, 0.0f);
D3DXVECTOR3 up(0.0f, 1.0f, 0.0f);
D3DXMatrixLookAtLH(&m_view, &eye, &at, &up);
D3DXMatrixPerspectiveFovLH(&m_projection, (float)D3DX_PI * 0.25f, m_width/(FLOAT)m_height, 0.1f, 100.0f);
return hr;
}
Once again, it's the exact same code (but encapsulated) as the tutorial I mentioned earlier.
When I open the Tutorial Visual Studio Solution in my IDE, it works and displays nicely what is described in the scene, but when I try to run my "encapsulated" version of this code, nothing shows up but the background color...
Note: my Windows message pump works fine — I can even handle user input the way I want; everything's fine. My application performs my engine initialization correctly (I check every single returned error code and there's nothing but S_OK codes).
I have no clue where to search now. I've checked my code times and times again and it's exactly the same as the tutorial, I've checked that everything I encapsulate is set and accessed correctly, etc, but I still can't display anything else than the background color...
I was wondering if anyone here could have an idea of what could possibly cause this, or at least hints on where to look for...
EDIT: Effect file used :
//--------------------------------------------------------------------------------------
// File: Tutorial05.fx
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//--------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------
// Constant Buffer Variables
//--------------------------------------------------------------------------------------
// Set each frame by the application through the effect's
// ID3D10EffectMatrixVariable::SetMatrix calls.
matrix World;
matrix View;
matrix Projection;
//--------------------------------------------------------------------------------------
// Per-vertex input read from the vertex buffer (position + vertex color).
struct VS_INPUT
{
float4 Pos : POSITION;
float4 Color : COLOR;
};
// Vertex-shader output / pixel-shader input: clip-space position plus the
// interpolated vertex color.
struct PS_INPUT
{
float4 Pos : SV_POSITION;
float4 Color : COLOR;
};
//--------------------------------------------------------------------------------------
// Vertex Shader
//--------------------------------------------------------------------------------------
// Transforms the object-space position through World, View and Projection
// and passes the vertex color through unchanged.
PS_INPUT VS( VS_INPUT input )
{
PS_INPUT output = (PS_INPUT)0;
output.Pos = mul( input.Pos, World );
output.Pos = mul( output.Pos, View );
output.Pos = mul( output.Pos, Projection );
output.Color = input.Color;
return output;
}
//--------------------------------------------------------------------------------------
// Pixel Shader
//--------------------------------------------------------------------------------------
// Outputs the interpolated vertex color directly.
float4 PS( PS_INPUT input) : SV_Target
{
return input.Color;
}
//--------------------------------------------------------------------------------------
// Single-pass technique: VS + PS, no geometry shader.
technique10 Render
{
pass P0
{
SetVertexShader( CompileShader( vs_4_0, VS() ) );
SetGeometryShader( NULL );
SetPixelShader( CompileShader( ps_4_0, PS() ) );
}
}

I think this can be the error:
The input-assembler stage of the D3D (10 and 11) pipeline always expects a DXGI_FORMAT_***_UINT format for index buffers. MSDN confirms this:
A DXGI_FORMAT that specifies the format of the data in the index
buffer. The only formats allowed for index buffer data are 16-bit
(DXGI_FORMAT_R16_UINT) and 32-bit (DXGI_FORMAT_R32_UINT) integers.
Then look at your code that binds your buffer to IA:
m_device->IASetIndexBuffer(m_indexBuffer, DXGI_FORMAT_R32_FLOAT, 0);
I think you should use DXGI_FORMAT_R32_UINT for your case, like this:
m_device->IASetIndexBuffer(m_indexBuffer, DXGI_FORMAT_R32_UINT, 0);

Related

Passing normal data to shader

I have written the simple code to render some objects with DirectX 11.
The position has been passed to shader correctly. However, the normals seem to be lost somewhere. I have changed the shader to see the normals' value as a color (just a debug purpose) and I get the black box (0,0,0 normals of every vertex?), in the right position:
Note that on the right bar I can see my NORMAL values (they are right!), but in the "locals" only position is set and the rest of values are NaN. Why?
The shader:
... //some constants
// Per-vertex input; must match the application's D3D11 input layout.
struct VertexInputType
{
float4 position : POSITION;
float2 tex : TEXCOORD;
float3 normal : NORMAL;
float3 tangent : TANGENT;
//float3 binormal : BINORMAL;
};
// Vertex-shader output / pixel-shader input.
struct VS_OUTPUT
{
float4 Pos : SV_POSITION;
float4 worldPos : POSITION;
float2 TexCoord : TEXCOORD;
float3 normal : NORMAL;
float3 tangent : TANGENT;
};
//VS_OUTPUT VS(float4 inPos : POSITION, float2 inTexCoord : TEXCOORD, float3 inNormal : NORMAL, float3 tangent : TANGENT)
// NOTE(review): output.TexCoord and output.tangent are never written here,
// and output.normal is passed through untransformed (no World rotation) --
// verify both; unwritten interpolants may be stripped by the compiler and
// change the signature seen by the pixel shader.
VS_OUTPUT VS(VertexInputType input)
{
VS_OUTPUT output;
output.Pos = mul(input.position, WVP);
output.worldPos = mul(input.position, World);
output.normal = input.normal;
return output;
}
// Debug pixel shader: visualizes the (scaled) normal as a color.
float4 PS(VS_OUTPUT input) : SV_TARGET
{
return float4(input.normal*100, 1);
}
technique10 RENDER
{
pass P0
{
SetVertexShader( CompileShader( vs_4_0, VS() ) );
// SetGeometryShader( CompileShader( gs_4_0, GS() ) );
SetPixelShader( CompileShader( ps_4_0, PS() ) );
SetBlendState( SrcAlphaBlendingAdd, float4( 0.0f, 0.0f, 0.0f, 0.0f ), 0xFFFFFFFF );
}
}
During rendering I use:
// Per-draw setup: bind geometry, upload per-object constants, draw subsets.
UINT stride = sizeof(Vertex);
UINT offset = 0;
context->IASetVertexBuffers(0, 1, &buffers->vertexBuffer, &stride, &offset); //set vertex buffer
context->IASetIndexBuffer(buffers->indexBuffer, DXGI_FORMAT_R16_UINT, 0); //set index buffer
for(int i=0; i<structure.subsets.size(); i++){
//set matrices
// NOTE(review): view/projection are loop-invariant; fetching and
// reloading them once per subset is redundant and could be hoisted.
DirectX::XMFLOAT4X4 view = camera->getView();
DirectX::XMMATRIX camView = XMLoadFloat4x4(&view);
DirectX::XMFLOAT4X4 projection = camera->getProjection();
DirectX::XMMATRIX camProjection = XMLoadFloat4x4(&projection);
DirectX::XMMATRIX worldViewProjectionMatrix = objectWorldMatrix * camView * camProjection;
//set the constants per object
ConstantBufferStructure constantsPerObject;
// Transposed because HLSL constant buffers default to column-major packing.
constantsPerObject.worldViewProjection = XMMatrixTranspose(worldViewProjectionMatrix);
constantsPerObject.world = XMMatrixTranspose(objectWorldMatrix);
//bind constants per object to constant buffer and send it to vertex and pixel shaders
context->UpdateSubresource(constantBuffer, 0, NULL, &constantsPerObject, 0, 0);
context->VSSetConstantBuffers(0, 1, &constantBuffer);
context->PSSetConstantBuffers(0, 1, &constantBuffer);
//context->PSSetSamplers(0,1,&m_sampleState);
context->RSSetState(RSCullDefault);
int start = structure.subsets[i]->getVertexIndexStart();
int count = structure.subsets[i]->getVertexIndexAmmount();
context->DrawIndexed(count, start, 0);
}
And for the shader initialization:
// Create the vertex shader
// NOTE(review): the HRESULTs below are assigned but never checked before
// the blobs/objects are used -- a failed compile or create goes unnoticed.
hr = device->CreateVertexShader( pVSBlob->GetBufferPointer(), pVSBlob->GetBufferSize(), NULL, &vertexShader );
//create the input layout
VertexLayoutDescirption layoutDescription; //gives us the layout data corresponding to the Vertex structure
hr = device->CreateInputLayout(layoutDescription.layout, layoutDescription.entriesCount, pVSBlob->GetBufferPointer(), pVSBlob->GetBufferSize(), &*vertexLayout );
pVSBlob->Release();
context->IASetInputLayout( *vertexLayout );
//compile the pixel shader
ID3DBlob* pPSBlob = NULL;
CompileShaderFromFile( C::toWChar(C::toString(pixelShaderFileName)), "PS", "ps_4_0", &pPSBlob );
// Create the pixel shader
hr = device->CreatePixelShader( pPSBlob->GetBufferPointer(), pPSBlob->GetBufferSize(), NULL, &pixelShader );
Where:
struct Vertex{//vertex structure
Vertex() : weightCount(0){}
Vertex(float x, float y, float z, float u, float v, float nx, float ny, float nz, float tx, float ty, float tz)
: position(x, y, z), textureCoordinates(u, v), normals(nx, ny, nz), tangents(tx, ty, tz), weightCount(0){}
Vertex(DirectX::XMFLOAT3 position, DirectX::XMFLOAT2 textureCoordinates, DirectX::XMFLOAT3 normals, DirectX::XMFLOAT3 biTangents)
: position(position), textureCoordinates(textureCoordinates), normals(normals), tangents(tangents), weightCount(0){}
DirectX::XMFLOAT3 position;
DirectX::XMFLOAT2 textureCoordinates;
DirectX::XMFLOAT3 normals;
DirectX::XMFLOAT3 tangents;
DirectX::XMFLOAT3 biTangents;
//will not be sent to shader (and used only by skinned models)
int startWeightIndex; //index in Subset::weights (from 0 to X for each subset separately)
int weightCount; //=0 means that it's not skinned vertex
};
/* Used by the Shader; must correspond to Vertex (the data we transfer to the shader). */
struct VertexLayoutDescirption{
D3D11_INPUT_ELEMENT_DESC layout[4]; //the input layout
UINT entriesCount; //the number of elements of layout[], will be also used by Shader
// Fills the four elements with shared defaults, then overrides each
// element's semantic name and byte offset.
VertexLayoutDescirption(){
entriesCount = 4;
for(UINT i=0; i<entriesCount; i++){
layout[i].SemanticIndex = 0;
// NOTE(review): every element inherits R32G32B32_FLOAT, including
// TEXCOORD, although Vertex stores an XMFLOAT2 and the shader declares
// float2 -- the TEXCOORD element then reads 12 bytes from offset 12 and
// overlaps the normals. DXGI_FORMAT_R32G32_FLOAT looks intended; verify
// against the NaN values seen in the debugger.
layout[i].Format = DXGI_FORMAT_R32G32B32_FLOAT;
layout[i].InputSlot = 0;
layout[i].InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
layout[i].InstanceDataStepRate = 0;
}
layout[0].SemanticName ="POSITION";
layout[0].AlignedByteOffset = 0; //(not D3D11_APPEND_ALIGNED_ELEMENT)
layout[1].SemanticName ="TEXCOORD";
layout[1].AlignedByteOffset = 12; //or D3D11_APPEND_ALIGNED_ELEMENT
layout[2].SemanticName ="NORMAL";
layout[2].AlignedByteOffset = 20; //or D3D11_APPEND_ALIGNED_ELEMENT
layout[3].SemanticName ="TANGENT";
layout[3].AlignedByteOffset = 32; //or D3D11_APPEND_ALIGNED_ELEMENT
}
};
The box model:
/*top vertices*/
// NOTE(review): these normals (all with Y=1 plus a varying Z component)
// are not unit length and do not differ per face as box normals usually
// would -- confirm they are intentional debug data.
structure.vertices[0] = Vertex(/*pos*/ -1.0f, +1.0f, -1.0f, /*uv*/ 1.0f, 1.0f, /*normals*/ 0.0f, 1.0f, -1.0f, /*tan*/ +1.0f, -1.0f, 1.0f);
structure.vertices[1] = Vertex(/*pos*/ +1.0f, +1.0f, -1.0f, /*uv*/ 0.0f, 1.0f, /*normals*/ 0.0f, 1.0f, +1.0f, /*tan*/ +1.0f, -1.0f, 1.0f);
structure.vertices[2] = Vertex(/*pos*/ +1.0f, +1.0f, +1.0f, /*uv*/ 1.0f, 0.0f, /*normals*/ 0.0f, 1.0f, +1.0f, /*tan*/ +1.0f, +1.0f, 1.0f);
structure.vertices[3] = Vertex(/*pos*/ -1.0f, +1.0f, +1.0f, /*uv*/ 0.0f, 0.0f, /*normals*/ 0.0f, 1.0f, -1.0f, /*tan*/ +1.0f, +1.0f, 1.0f);
/*bottom vertices*/
structure.vertices[4] = Vertex(/*pos*/ -1.0f, -1.0f, -1.0f, /*uv*/ 1.0f, 0.0f, /*normals*/ 0.0f, 1.0f, -1.0f, /*tan*/ -1.0f, -1.0f, 1.0f);
structure.vertices[5] = Vertex(/*pos*/ +1.0f, -1.0f, -1.0f, /*uv*/ 0.0f, 0.0f, /*normals*/ 0.0f, 1.0f, +1.0f, /*tan*/ -1.0f, -1.0f, 1.0f);
structure.vertices[6] = Vertex(/*pos*/ +1.0f, -1.0f, +1.0f, /*uv*/ 1.0f, 1.0f, /*normals*/ 0.0f, 1.0f, +1.0f, /*tan*/ -1.0f, +1.0f, 1.0f);
structure.vertices[7] = Vertex(/*pos*/ -1.0f, -1.0f, +1.0f, /*uv*/ 0.0f, 1.0f, /*normals*/ 0.0f, 1.0f, -1.0f, /*tan*/ -1.0f, +1.0f, 1.0f);
// Create the GPU-only (DEFAULT usage, no CPU access) vertex buffer from
// the array above.
buffers = new Buffers();
D3D11_BUFFER_DESC bd;
ZeroMemory(&bd, sizeof(bd));
bd.Usage = D3D11_USAGE_DEFAULT; //D3D11_USAGE_DYNAMIC
bd.ByteWidth = sizeof(Vertex) * structure.getVerticesCount();
bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bd.CPUAccessFlags = 0;
D3D11_SUBRESOURCE_DATA InitData;
ZeroMemory(&InitData, sizeof(InitData));
InitData.pSysMem = structure.vertices;
if(device->CreateBuffer(&bd, &InitData, &buffers->vertexBuffer) != S_OK){
return false;
}
... //index buffer
Why have the normals not been passed to the shader while the position was? What did I miss?
In the shader file try to use float3 normal : TEXCOORD1; or float3 normal : TEXCOORD2; or any Semantic TEXCOORD with any Index instead of float3 normal : NORMAL; in VS_OUTPUT structure,

DirectX11 Shader Compilation Issue

I'm working on a simple DirectX application to display a couple of triangles together as a tetrahedron, which keeps crashing at start. I checked with the VS2012 debugger: the error occurs at the stage where the shader is supposed to be compiled from a .fx file, so I assume it's got something to do with the shader. I have no idea what I did wrong. Below is the code of the shader I'm using. Assistance required.
// Simple light description; must match the application-side struct.
struct Light
{
float3 pos;
float4 ambient;
float4 diffuse;
};
cbuffer cbPerFrame
{
Light light;
};
cbuffer cbPerObject
{
float4x4 WVP;
float4x4 World;
};
// Vertex-shader output / pixel-shader input.
struct VS_OUTPUT
{
float4 Pos : SV_POSITION;
float4 worldPos : POSITION;
float4 color : COLOR;
float3 normal : NORMAL;
};
VS_OUTPUT VS(float4 inPos : POSITION, float4 inColor : COLOR, float3 normal : NORMAL)
{
VS_OUTPUT output;
output.Pos = mul(inPos, WVP);
output.worldPos = mul(inPos, World);
// NOTE(review): mul(float3, float4x4) truncates the matrix; confirm the
// intent is a pure-rotation world matrix, otherwise normals skew.
output.normal = mul(normal, World);
output.color = inColor;
return output;
}
float4 PS(VS_OUTPUT input) : SV_TARGET
{
input.normal = normalize(input.normal);
float4 diffuse = input.color;
float3 finalColor = float3(0.0f, 0.0f, 0.0f);
//Create the vector between light position and pixels position
float3 lightToPixelVec = light.pos - input.worldPos;
//Add the ambient light
float3 finalAmbient = diffuse * light.ambient;
//Turn lightToPixelVec into a unit length vector describing
//the pixels direction from the lights position
//BUG: 'd' is never declared or computed (it should be the distance,
//e.g. float d = length(lightToPixelVec);). This makes the pixel shader
//fail to compile -- the cause of the crash discussed in this question.
lightToPixelVec /= d;
//Calculate how much light the pixel gets by the angle
//in which the light strikes the pixels surface
float howMuchLight = dot(lightToPixelVec, input.normal);
//If light is striking the front side of the pixel
if( howMuchLight > 0.0f )
{
//Add light to the finalColor of the pixel
// NOTE(review): howMuchLight is computed but never used; diffuse
// lighting normally scales by it -- confirm intent.
finalColor += diffuse * light.diffuse;
}
//make sure the values are between 1 and 0, and add the ambient
finalColor = saturate(finalColor + finalAmbient);
//Return Final Color
return float4(finalColor, diffuse.a);
}
Here's the part where the Compilation is supposed to happen
// Compiles and creates the shaders, builds the tetrahedron's vertex and
// index buffers, input layout, viewport, constant buffers and camera
// matrices. Returns false only when the VERTEX shader fails to compile.
bool InitScene()
{
//Compile Shaders from shader file
hr = D3DX11CompileFromFile(L"Effects.fx", 0, 0, "VS", "vs_4_0", 0, 0, 0,
&VS_Buffer, 0, 0);
if(FAILED(hr))
{
MessageBox(0, L"Shader Compilation - Failed",
L"Error", MB_OK);
return false;
}
hr = D3DX11CompileFromFile(L"Effects.fx", 0, 0, "PS", "ps_4_0", 0, 0, 0,
&PS_Buffer, 0, 0);
//BUG: unlike the vertex shader above, this hr is never checked. When the
//pixel shader fails to compile, PS_Buffer stays invalid and
//CreatePixelShader below dereferences a bad pointer -- the crash cause.
//Create the Shader Objects
hr = d3d11Device->CreateVertexShader(VS_Buffer->GetBufferPointer(),
VS_Buffer->GetBufferSize(), NULL, &VS);
hr = d3d11Device->CreatePixelShader(PS_Buffer->GetBufferPointer(),
PS_Buffer->GetBufferSize(), NULL, &PS);
//Set Vertex and Pixel Shaders
d3d11DevCon->VSSetShader(VS, 0, 0);
d3d11DevCon->PSSetShader(PS, 0, 0);
light.pos = XMFLOAT3(0.25f, 0.5f, -1.0f);
light.ambient = XMFLOAT4(0.2f, 0.2f, 0.2f, 1.0f);
light.diffuse = XMFLOAT4(1.0f, 1.0f, 1.0f, 1.0f);
//X,Y,Z,R,G,B,A,NX,NY,NZ
//Create the vertex buffer
Vertex v[] =
{
Vertex( 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f),
Vertex( -0.5f, -0.5f, 0.5f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f),
Vertex( 0.5f, -0.5f, 0.5f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f),
Vertex( 0.0f, -0.5f, 0.5f, 1.0f, 0.0f, 0.0f, 1.0f,0.0f, 1.0f, 0.0f)
};
// 4 triangles * 3 indices for the tetrahedron faces.
DWORD indices[] = {
//Front
0, 1, 2,
//Left
0, 1, 3,
//Right
0, 2, 3,
//Bottom
1, 2, 3
};
D3D11_BUFFER_DESC indexBufferDesc;
ZeroMemory( &indexBufferDesc, sizeof(indexBufferDesc) );
indexBufferDesc.Usage = D3D11_USAGE_DEFAULT;
indexBufferDesc.ByteWidth = sizeof(DWORD) * 4 * 3;
indexBufferDesc.BindFlags = D3D11_BIND_INDEX_BUFFER;
indexBufferDesc.CPUAccessFlags = 0;
indexBufferDesc.MiscFlags = 0;
D3D11_SUBRESOURCE_DATA iinitData;
//NOTE(review): iinitData is not ZeroMemory'd (unlike vertexBufferData
//below); only pSysMem is set -- inconsistent, though harmless for buffers.
iinitData.pSysMem = indices;
d3d11Device->CreateBuffer(&indexBufferDesc, &iinitData, &IndexBuffer);
//d3d11DevCon->IASetIndexBuffer( squareIndexBuffer, DXGI_FORMAT_R32_UINT, 0);
D3D11_BUFFER_DESC vertexBufferDesc;
ZeroMemory( &vertexBufferDesc, sizeof(vertexBufferDesc) );
vertexBufferDesc.Usage = D3D11_USAGE_DEFAULT;
vertexBufferDesc.ByteWidth = sizeof( Vertex ) * 4;
vertexBufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
vertexBufferDesc.CPUAccessFlags = 0;
vertexBufferDesc.MiscFlags = 0;
D3D11_SUBRESOURCE_DATA vertexBufferData;
ZeroMemory( &vertexBufferData, sizeof(vertexBufferData) );
vertexBufferData.pSysMem = v;
hr = d3d11Device->CreateBuffer( &vertexBufferDesc, &vertexBufferData, &VertBuffer);
//Set the vertex buffer
UINT stride = sizeof( Vertex );
UINT offset = 0;
//NOTE(review): the vertex buffer is never actually bound -- the
//IASetVertexBuffers call below is commented out.
//d3d11DevCon->IASetVertexBuffers( 0, 1, &squareVertBuffer, &stride, &offset );
//Create the Input Layout
hr = d3d11Device->CreateInputLayout( layout,
numElements,VS_Buffer->GetBufferPointer(),
VS_Buffer->GetBufferSize(), &vertLayout );
//Set the Input Layout
d3d11DevCon->IASetInputLayout( vertLayout );
//Set Primitive Topology
d3d11DevCon->IASetPrimitiveTopology( D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST );
//Create the Viewport
D3D11_VIEWPORT viewport;
ZeroMemory(&viewport, sizeof(D3D11_VIEWPORT));
viewport.TopLeftX = 0;
viewport.TopLeftY = 0;
viewport.Width = width;
viewport.Height = height;
viewport.MinDepth = 0.0f;
//NOTE(review): D3D11 requires MaxDepth within [0,1]; 2.0f is invalid and
//should be 1.0f.
viewport.MaxDepth = 2.0f;
//Set the Viewport
d3d11DevCon->RSSetViewports(1, &viewport);
//Create the buffer to send to the cbuffer in effect file
D3D11_BUFFER_DESC cbbd;
ZeroMemory(&cbbd, sizeof(D3D11_BUFFER_DESC));
cbbd.Usage = D3D11_USAGE_DEFAULT;
cbbd.ByteWidth = sizeof(cbPerObject);
cbbd.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
cbbd.CPUAccessFlags = 0;
cbbd.MiscFlags = 0;
hr = d3d11Device->CreateBuffer(&cbbd, NULL, &cbPerObjectBuffer);
ZeroMemory(&cbbd, sizeof(D3D11_BUFFER_DESC));
cbbd.Usage = D3D11_USAGE_DEFAULT;
cbbd.ByteWidth = sizeof(cbPerFrame);
cbbd.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
cbbd.CPUAccessFlags = 0;
cbbd.MiscFlags = 0;
hr = d3d11Device->CreateBuffer(&cbbd, NULL, &cbPerFrameBuffer);
//Camera information
camPosition = XMVectorSet( -5.0f, 5.0f, 8.0f, 0.0f );
camTarget = XMVectorSet( 0.0f, 0.0f, 0.0f, 0.0f );
camUp = XMVectorSet( 0.0f, 1.0f, 0.0f, 0.0f );
//Set the View matrix
camView = XMMatrixLookAtLH( camPosition, camTarget, camUp );
//Set the Projection matrix
//NOTE(review): if width/height are integers this divides as int and
//truncates the aspect ratio -- cast to float first.
camProjection = XMMatrixPerspectiveFovLH( 0.4f*3.14f, width/height, 1.0f, 1000.0f);
return true;
}
Your Vertex shader compiles, but your Pixel Shader doesn't:
lightToPixelVec /= d;
d is undefined
Since in your code you only check for VS compilation result, that makes sense that it crashes when trying to create Pixel Shader (as you send an invalid pointer).
As mentioned in the comments, it's also important to check the feature level; if you develop for desktop/laptop, pretty much any device should be at least feature level 10.1.
In the case of a phone, you should use one of these profiles (whichever matches best):
ps_4_0_level_9_1, ps_4_0_level_9_2, ps_4_0_level_9_3

DirectX 11: Pointing to a vertex buffer from another function / Trying to use multiple textures

Edit/ Update: Put in my most recent code, and asking a new question about texturing if you still have time to help me.
Original problem: I need to have the vertex buffer in its own function, I'm trying to make it with variables for vertices so I can run an array of randomly generated co-ordinates through it and result in many instances of cubes which I can control the size of.
Your advice set me on the right track and I was able to make the vertex buffer work in a separate function as desired. I may have set myself up for problems later on so I'm trying to show as much relevant code as possible just in case.
New problem: My next step is to do what I just did but drawing a different set of cubes (Friendlies, so different size, which is why I wanted to make the buffer more dynamic so I can re-use it for everything).
I think I can manage that part fine, but first I need to figure out how to run multiple textures so I can tell which is which (Also because onscreen text will be done by texturing squares with pictures of letter/ numbers.)
Here is the code involved:
// Vertex format: position, normal, and UV texture coordinates.
struct VERTEX {FLOAT X, Y, Z; D3DXVECTOR3 Normal; FLOAT U, V;};
// Creates the dynamic (CPU-writable) vertex and index buffers for one
// cube and loads the single wood texture. The vertex buffer is created
// empty here and filled later by VertBuffer().
void InitGraphics()
{
// create the vertex buffer
D3D11_BUFFER_DESC bd;
D3D11_MAPPED_SUBRESOURCE ms;
ZeroMemory(&bd, sizeof(bd));
bd.Usage = D3D11_USAGE_DYNAMIC;
bd.ByteWidth = sizeof(VERTEX) * 24; // size is the VERTEX struct * amount of vertices stored
bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bd.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
dev->CreateBuffer(&bd, NULL, &pVBuffer); // create the buffer
// create the index buffer out of DWORDs
DWORD IndexList[] =
{
0,1,2,3,
4,5,6,7,
8,9,10,11,
12,13,14,15,
16,17,18,19,
20,21,22,23,
};
// create the index buffer
bd.Usage = D3D11_USAGE_DYNAMIC;
bd.ByteWidth = sizeof(DWORD) * 24; // Changed to match the amount of indices used
bd.BindFlags = D3D11_BIND_INDEX_BUFFER;
bd.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
bd.MiscFlags = 0;
dev->CreateBuffer(&bd, NULL, &pIBuffer);
// NOTE(review): Map/Unmap pass NULL where UINT parameters (subresource
// index, map flags) are expected -- 0 is what's meant.
devcon->Map(pIBuffer, NULL, D3D11_MAP_WRITE_DISCARD, NULL, &ms);
memcpy(ms.pData, IndexList, sizeof(IndexList));
devcon->Unmap(pIBuffer, NULL);
D3DX11CreateShaderResourceViewFromFile
(dev, // the Direct3D device
L"Wood.png", // load Wood.png in the local folder
NULL, // no additional information
NULL, // no multithreading
&pTexture, // address of the shader-resource-view
NULL); // no out-parameter for the (async) HRESULT
}
// Per-frame draw: uploads the per-frame constants and binds the single
// texture SRV to pixel-shader slot 0 before drawing everything.
void RenderFrame(void)
{
...
devcon->UpdateSubresource(pCBuffer, 0, 0, &cBuffer, 0, 0);
devcon->PSSetShaderResources(0, 1, &pTexture);
...
DrawStuff();
}
I was able to follow your directions and bring the memcpy line into the other function as seen below but had to bring a couple other lines along with it to make it work. I included more of the code this time to show what else is in the InitGraphics function as my next problem is trying to figure out how to use multiple textures.
The vertex buffer now looks like this:
// Rebuilds the cube's vertex data around the globally-set position
// (vVX, vVZ) and half-size vS, then uploads it into pVBuffer.
void VertBuffer()
{
VERTEX VertList[] =
{
{(vVX - vS + 0.0f), (- vS + 0.0f), (vVZ - vS + 0.0f), D3DXVECTOR3(0.0f, 0.0f, -1.0f), 0.0f, 0.0f},
{(vVX - vS + 0.0f), (vS + 0.0f), (vVZ - vS + 0.0f), D3DXVECTOR3(0.0f, 0.0f, -1.0f), 0.0f, 1.0f},
{(vVX + vS + 0.0f), (- vS + 0.0f), (vVZ - vS + 0.0f), D3DXVECTOR3(0.0f, 0.0f, -1.0f), 1.0f, 0.0f},
{(vVX + vS + 0.0f), (vS + 0.0f), (vVZ - vS + 0.0f), D3DXVECTOR3(0.0f, 0.0f, -1.0f), 1.0f, 1.0f}, // side 1
...
};
// WRITE_DISCARD throws away the previous contents, so the whole VertList
// must be rewritten on every call.
D3D11_MAPPED_SUBRESOURCE ms;
devcon->Map(pVBuffer, NULL, D3D11_MAP_WRITE_DISCARD, NULL, &ms);
memcpy(ms.pData, VertList, sizeof(VertList));
devcon->Unmap(pVBuffer, NULL);
}
This is where vVX and vVZ are co-ordinates randomly generated and stored in an array and then vS is to manipulate the size of the cube. When I last posted I was still having problems with it because I managed to get the vertex buffer working in its own function, but I could still only call it once at the beginning which meant none of the variables took place. I tried putting it in DrawStuff() but that caused the program to crash after 3-6 seconds. Since then, I have absolutely no idea what I changed or edited but somehow the problem became fixed so now I have a working draw function which looks like this and calls the vertex buffer in every cycle to constantly update it on the locations of the cubes.
// Draws the ten "creep" cubes: for each creep the position/size globals
// are set and the vertex buffer rebuilt, then each of the 6 faces is
// drawn via a 4-index slice.
void DrawStuff()
{
for (j = 0; j < 10; j++) // Draw 10 Creeps
{
for (int i = 0; i < 6; i++)
{
// NOTE(review): VertBuffer() re-uploads the ENTIRE cube once per face
// (6x per creep) even though the data only changes per creep -- the
// three assignments and the VertBuffer() call could move to the outer
// loop.
vS = 2; // Creep size
vVX = aCMgr [j][0];
vVZ = aCMgr [j][1];
VertBuffer();
devcon->DrawIndexed(4, i * 4, 0);
}
}
}
So that seems to be working great now and I'm just going to make multiple of these. One for bad cubes (Creeps), one for the player + friendly cube, and one for lots of squares which will be textured to make up a rudimentary GUI.
After 12 hours of Google searching and re-reading the tutorial website as well as my own code, I've got as far as learning that I need to change the array size in D3D11_TEXTURE2D_DESC and then run the part in InitGraphics multiple times to load up each texture but I still cannot for the life of me figure out at what point to control applying different textures to different objects.
Here's (I think) all the code I have relating to textures:
ID3D11ShaderResourceView *pTexture; // Shader-resource view for the loaded texture
void InitD3D(HWND hWnd)
{
...
// NOTE(review): this D3D11_TEXTURE2D_DESC describes the DEPTH buffer
// (DXGI_FORMAT_D32_FLOAT, D3D11_BIND_DEPTH_STENCIL). Setting
// ArraySize = 3 here does NOT create an array of diffuse textures for
// the shader -- color textures come from the
// D3DX11CreateShaderResourceViewFromFile calls, each producing its own
// SRV, not from the depth-stencil description.
D3D11_TEXTURE2D_DESC texd;
ZeroMemory(&texd, sizeof(texd));
texd.Width = 512;
texd.Height = 512;
texd.ArraySize = 3;
texd.MipLevels = 1;
texd.SampleDesc.Count = 4;
texd.Format = DXGI_FORMAT_D32_FLOAT;
texd.BindFlags = D3D11_BIND_DEPTH_STENCIL;
ID3D11Texture2D *pDepthBuffer;
dev->CreateTexture2D(&texd, NULL, &pDepthBuffer);
...
}
I changed ArraySize to 3, assuming I will have 3 different images which will be used to texture everything. From my understanding, I need to run D3DX11CreateShaderResourceViewFromFile three times, once for each texture? Where would I go from here?
You can put the Vertex array and memcpy together in a same function and call this function in InitGraphics().
// Builds the quad's vertex data and copies it into the (already mapped)
// vertex buffer; 'ms' comes from the Map() call elided below.
void InitVertexBuffer()
{
VERTEX VertList[] =
{
{-1.0f, -1.0f, -1.0f, D3DXVECTOR3(0.0f, 0.0f, -1.0f), 0.0f, 0.0f},
{-1.0f, 1.0f, -1.0f, D3DXVECTOR3(0.0f, 0.0f, -1.0f), 0.0f, 1.0f},
{1.0f, -1.0f, -1.0f, D3DXVECTOR3(0.0f, 0.0f, -1.0f), 1.0f, 0.0f},
{1.0f, 1.0f, -1.0f, D3DXVECTOR3(0.0f, 0.0f, -1.0f), 1.0f, 1.0f},
};
// Some code else....
//BUGFIX: was sizeof(Vertlist) -- an undeclared identifier (compile
//error); the array is named VertList.
memcpy(ms.pData, VertList, sizeof(VertList));
}
void InitGraphics()
{
// ... Code to initialized D3D11
InitVertexBuffer();
}
Another thing I want to point is in DirectX11, you don't necessary need to use memcpy, you can bind the vertex data before creating vertex buffer, as below.
// The vertex format
// Position + RGB color, matching a POSITION/COLOR input layout.
struct SimpleVertex
{
DirectX::XMFLOAT3 Pos; // Position
DirectX::XMFLOAT3 Color; // color
};
// Creates the cube's GPU-only (DEFAULT usage) vertex buffer, handing the
// vertex data to CreateBuffer via D3D11_SUBRESOURCE_DATA so no separate
// Map/memcpy step is needed. Reports a message box on failure.
VOID InitVertexBuffer()
{
// Eight cube corners, each carrying a position and an RGB color.
SimpleVertex cubeCorners[] =
{
{ XMFLOAT3( -1.0f, 1.0f, -1.0f ), XMFLOAT3( 0.0f, 0.0f, 1.0f) },
{ XMFLOAT3( 1.0f, 1.0f, -1.0f ), XMFLOAT3( 0.0f, 1.0f, 0.0f) },
{ XMFLOAT3( 1.0f, 1.0f, 1.0f ), XMFLOAT3( 0.0f, 1.0f, 1.0f) },
{ XMFLOAT3(-1.0f, 1.0f, 1.0f ), XMFLOAT3( 1.0f, 0.0f, 0.0f) },
{ XMFLOAT3(-1.0f, -1.0f, -1.0f ), XMFLOAT3( 1.0f, 0.0f, 1.0f) },
{ XMFLOAT3( 1.0f, -1.0f, -1.0f ), XMFLOAT3( 1.0f, 1.0f, 0.0f) },
{ XMFLOAT3( 1.0f, -1.0f, 1.0f ), XMFLOAT3( 1.0f, 1.0f, 1.0f) },
{ XMFLOAT3(-1.0f, -1.0f, 1.0f ), XMFLOAT3( 0.0f, 0.0f, 0.0f) },
};
// Describe a vertex buffer sized exactly for the array above.
D3D11_BUFFER_DESC bufferDesc;
ZeroMemory(&bufferDesc, sizeof(bufferDesc));
bufferDesc.Usage = D3D11_USAGE_DEFAULT;
bufferDesc.ByteWidth = sizeof(cubeCorners);
bufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bufferDesc.CPUAccessFlags = 0;
// Initial data: the driver copies from this pointer during creation.
D3D11_SUBRESOURCE_DATA vertexData;
ZeroMemory(&vertexData, sizeof(vertexData));
vertexData.pSysMem = cubeCorners;
HRESULT createResult = g_pd3dDevice->CreateBuffer(&bufferDesc, &vertexData, &g_pVertexBuffer);
if(FAILED(createResult))
{
MessageBox(NULL, L"Create vertex buffer failed", L"Error", 0);
}
}

What can I do not to calculate buffers in every render(), for DirectX 11?

I am writing my own 'Engine' class to make it simpler to use DirectX 11 and to learn it. That 'Engine' has a list of objects to render. Some of them are 'Primitives' (own Box class).
The problem is that Box::render() creates a buffers every function call.
Can I change something with it not to calculate them every function call?
Maybe I should calculate the g_pIndexBuffer just once in constructor and keep it for each Box? But what with the g_pVertexBuffer and g_pConstantBuffer?
I'm not only asking for the box, but more general - the situation will change when I will want to draw other Primitives or meshes? (for Box I think I could calculate g_pVertexBuffer once, and than just scale object with matrixs).
The code looks now like this (I have skip some parts of code here, the code is based on tutorial from DirectX SDK):
// Renders one box.
// NOTE(review): this method re-creates the vertex, index and constant
// buffers on EVERY call (the subject of this question), and the local
// g_pVertexBuffer is never Released -- each call leaks one buffer.
// Create the buffers once (e.g. in the constructor) and reuse them here.
Box::render(
XMFLOAT4X4 &viewM,
XMFLOAT4X4 &projectionM,
ID3D11Buffer* g_pConstantBuffer, ID3D11DeviceContext* g_pImmediateContext,
ID3D11VertexShader* g_pVertexShader,
ID3D11PixelShader* g_pPixelShader,
ID3D11Device * g_pd3dDevice,
ID3D11Buffer* g_pIndexBuffer){
...
// Create vertex buffer
SimpleVertex vertices[] =
{
{ XMFLOAT3( -1.0f, 1.0f, -1.0f ), XMFLOAT4( 0.0f, 0.0f, 1.0f, 1.0f ) },
{ XMFLOAT3( 1.0f, 1.0f, -1.0f ), XMFLOAT4( 0.0f, 1.0f, 0.0f, 1.0f ) },
{ XMFLOAT3( 1.0f, 1.0f, 1.0f ), XMFLOAT4( 0.0f, 1.0f, 1.0f, 1.0f ) },
{ XMFLOAT3( -1.0f, 1.0f, 1.0f ), XMFLOAT4( 1.0f, 0.0f, 0.0f, 1.0f ) },
{ XMFLOAT3( -1.0f, -1.0f, -1.0f ), XMFLOAT4( 1.0f, 0.0f, 1.0f, 1.0f ) },
{ XMFLOAT3( 1.0f, -1.0f, -1.0f ), XMFLOAT4( 1.0f, 1.0f, 0.0f, 1.0f ) },
{ XMFLOAT3( 1.0f, -1.0f, 1.0f ), XMFLOAT4( 1.0f, 1.0f, 1.0f, 1.0f ) },
{ XMFLOAT3( -1.0f, -1.0f, 1.0f ), XMFLOAT4( 0.0f, 0.0f, 0.0f, 1.0f ) },
};
D3D11_BUFFER_DESC bd;
ZeroMemory( &bd, sizeof(bd) );
bd.Usage = D3D11_USAGE_DEFAULT;
bd.ByteWidth = sizeof( SimpleVertex ) * 8;
bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bd.CPUAccessFlags = 0;
D3D11_SUBRESOURCE_DATA InitData;
ZeroMemory( &InitData, sizeof(InitData) );
InitData.pSysMem = vertices;
ID3D11Buffer* g_pVertexBuffer;
HRESULT hr = g_pd3dDevice->CreateBuffer( &bd, &InitData, &g_pVertexBuffer );
// Set vertex buffer
UINT stride = sizeof( SimpleVertex );
UINT offset = 0;
g_pImmediateContext->IASetVertexBuffers( 0, 1, &g_pVertexBuffer, &stride, &offset);
// 16-bit indices, matching the DXGI_FORMAT_R16_UINT bind below.
WORD indices[] =
{
3,1,0, 2,1,3,
0,5,4, 1,5,0,
3,4,7, 0,4,3,
1,6,5, 2,6,1,
2,7,6, 3,7,2,
6,4,5, 7,4,6,
};
bd.Usage = D3D11_USAGE_DEFAULT;
bd.ByteWidth = sizeof( WORD ) * 36; // 36 vertices needed for 12 triangles in a triangle list
bd.BindFlags = D3D11_BIND_INDEX_BUFFER;
bd.CPUAccessFlags = 0;
InitData.pSysMem = indices;
hr = g_pd3dDevice->CreateBuffer( &bd, &InitData, &g_pIndexBuffer );
// Set index buffer
g_pImmediateContext->IASetIndexBuffer( g_pIndexBuffer, DXGI_FORMAT_R16_UINT, 0 );
// Set primitive topology
g_pImmediateContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
// Create the constant buffer
bd.Usage = D3D11_USAGE_DEFAULT;
bd.ByteWidth = sizeof(ConstantBuffer2);
bd.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
bd.CPUAccessFlags = 0;
hr = g_pd3dDevice->CreateBuffer( &bd, NULL, &g_pConstantBuffer );
...
// Compose the world transform: scale, spin, translate, then orbit.
XMMATRIX mSpin = XMMatrixRotationZ( a );
XMMATRIX mOrbit = XMMatrixRotationY( b );
XMMATRIX mTranslate = XMMatrixTranslation( c, d, e );
XMMATRIX mScale = XMMatrixScaling( f, g, h );
XMMATRIX g_World = mScale * mSpin * mTranslate * mOrbit;
ConstantBuffer2 cb1;
// Matrices transposed for HLSL's default column-major packing.
cb1.mWorld = XMMatrixTranspose( g_World );
XMMATRIX g_View = XMLoadFloat4x4(&viewM);
XMMATRIX g_Projection = XMLoadFloat4x4(&projectionM);
cb1.mView = XMMatrixTranspose( g_View );
cb1.mProjection = XMMatrixTranspose( g_Projection );
g_pImmediateContext->UpdateSubresource( g_pConstantBuffer, 0, NULL, &cb1, 0, 0 );
g_pImmediateContext->VSSetShader( g_pVertexShader, NULL, 0 );
g_pImmediateContext->VSSetConstantBuffers( 0, 1, &g_pConstantBuffer );
g_pImmediateContext->PSSetShader( g_pPixelShader, NULL, 0 );
g_pImmediateContext->DrawIndexed( 36, 0, 0 );
}
The answer to your question depends on the type of buffer. And, of course, on the type of application you are creating.
Vertex and index buffers should be created only once and reused as often as possible. A good place for the creation is the constructor of the class that represents your objects. Furthermore, if there is shared geometry between the objects, you could consider creating a geometry class (something like the Mesh in versions up to DX 9). Then you create the buffers once in the constructor of the geometry class and reference the appropriate geometry instances from your objects.
Of course, there are situations where especially vertex buffers can change throughout the application. But even then, you should create them once at the beginning and then rewrite them when necessary — and that should occur as little as possible. Many changes to vertex buffers can instead be made in an appropriate shader.
For constant buffers, this is a bit trickier. Constant buffers should be divided into data that change:
never
per frame
per frame and per object.
and maybe others where necessary
And that implies the positions, where they should be created. Never changing buffers should be created once at the beginning of the application (just like vertex and index buffers). Per frame buffers should be created at the beginning of a new frame and per object buffers should be created when rendering the object.
Remember that buffers can exist without being set as the current buffer. Here is what the buffer life cycle would look like:
create vertex, index and never changing constant buffers
for each frame
create per frame constant buffer
for each object
create per object constant buffer
set vertex, index and all constant buffers
render the object
This guideline changes slightly as your application becomes more advanced. For example, you could sort the objects by shader, geometry and so on, and set the buffers accordingly.

DirectX: Small distortion between 2 sprite polygons

Hello. I have been using the same method to render sprites with DirectX for a long time, but here I am rendering the screen into a texture and then drawing that texture with one big sprite covering the screen.
For the camera I use that:
vUpVec=D3DXVECTOR3(0,1,0);
vLookatPt=D3DXVECTOR3(0,0,0);
vFromPt=D3DXVECTOR3(0,0,-1);
D3DXMatrixLookAtRH( &matView, &vFromPt, &vLookatPt, &vUpVec );
g_pd3dDevice->SetTransform( D3DTS_VIEW, &matView );
D3DXMatrixOrthoRH( &matProj, 1,1, 0.5f, 20 );
g_pd3dDevice->SetTransform( D3DTS_PROJECTION, &matProj );
And to render the sprite:
CUSTOMVERTEX* v;
spritevb->Lock( 0, 0, (void**)&v, 0 );
v[0].position = D3DXVECTOR3(-0.5f,-0.5f,0); v[0].u=0; v[0].v=1;
v[1].position = D3DXVECTOR3(-0.5f,0.5f,0); v[1].u=0; v[1].v=0;
v[2].position = D3DXVECTOR3(0.5f,-0.5f,0); v[2].u=1; v[2].v=1;
v[3].position = D3DXVECTOR3(0.5f,0.5f,0); v[3].u=1; v[3].v=0;
spritevb->Unlock();
g_pd3dDevice->DrawPrimitive( D3DPT_TRIANGLESTRIP, 0, 2 );
This is very basic and works: my sprite is rendered full-screen.
But looking closer, I see a thin diagonal line across the screen (along the edge between the two triangles) — not a colored line, but as if the two polygons weren't perfectly positioned.
I thought about filtering and tried removing everything but maybe I forget something...
Thanks
The best way to render full screen is to not define any camera transform at all.
If you use the following positions as input:
// Full-screen quad specified directly in clip space (NDC): x and y span
// [-1, 1], z is a constant 0.5 inside the depth range.
// Each vertex pairs a position (XMFLOAT3) with a texture coordinate
// (XMFLOAT2); UV (0,0) is attached to the top-left corner (-1, 1) and
// UV (1,1) to the bottom-right corner (1, -1).
SimpleVertex vertices[] =
{
{ XMFLOAT3( -1.0f, 1.0f, 0.5f ), XMFLOAT2( 0.0f, 0.0f ) },
{ XMFLOAT3( 1.0f, 1.0f, 0.5f ), XMFLOAT2( 1.0f, 0.0f ) },
{ XMFLOAT3( 1.0f, -1.0f, 0.5f ), XMFLOAT2( 1.0f, 1.0f ) },
{ XMFLOAT3( -1.0f, -1.0f, 0.5f ), XMFLOAT2( 0.0f, 1.0f ) },
};
and in the Vertex Shader do
// Pass-through vertex shader: no world/view/projection transform is applied,
// so the incoming positions are used directly as clip-space coordinates.
// Position and texture coordinates are forwarded to the rasterizer unchanged.
VS_OUTPUT RenderSceneVS( VS_INPUT input )
{
VS_OUTPUT Output;
// Input position is already in clip space -- no matrix multiplication needed.
Output.Position = input.Position;
Output.TextureUV = input.TextureUV;
return Output;
}
you get a full-screen render as well, without having to worry about the viewing frustum. Using this, I never saw any lines between the two triangles.