Direct X & HLSL - Normal ReCalculation - c++

I'm using HLSL and DirectX 9. I'm trying to recalculate the normals of a mesh so that HLSL receives updated normals as a result of transforming the mesh. What method is best to do this...also...D3DXComputeNormals will not work for me because I do not use FVF_NORMAL as a vertex declaration...I declare vertex format like so:
// D3D9 vertex declaration: one stream, 32 bytes per vertex —
// position (12 B) + normal (12 B) + one 2D texcoord (8 B).
const D3DVERTEXELEMENT9 dec[4] =
{
{0, 0, D3DDECLTYPE_FLOAT3, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_POSITION,0},  // float3 position @ offset 0
{0, 12, D3DDECLTYPE_FLOAT3, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_NORMAL, 0},  // float3 normal   @ offset 12
{0, 24, D3DDECLTYPE_FLOAT2, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_TEXCOORD,0}, // float2 uv       @ offset 24
D3DDECL_END() // required terminator element
};
I know how to access the adjacency data and vertex buffers but I'm not sure what method to use in order to properly associate a vertex and its normal with a face...Any help would be greatly appreciated. Thanks!

It's not a good idea to update the normals on CPU and send it to GPU each frame. That would ruin the performance. What you really should do is to calculate the transformed normals in the vertex shader, just like you do with the positions. HLSL code would look like this:
float4x4 mWorldView;     // World * View
float4x4 mWorldViewProj; // World * View * Proj

struct VS_OUTPUT
{
float4 position : POSITION;
float2 tex : TEXCOORD0;
float3 normalVS : TEXCOORD1; // view-space normal
};

// Vertex Shader: transforms the position to clip space and the normal to
// view space, so the GPU handles "recalculated" normals every frame.
VS_OUTPUT VS(float3 position : POSITION,
float3 normal : NORMAL,
float2 tex : TEXCOORD0)
{
// FIX: the local was originally named "out", which is a reserved HLSL
// keyword (the out-parameter qualifier) and fails to compile.
VS_OUTPUT output;
// transform the position to clip space
output.position = mul(float4(position, 1), mWorldViewProj);
// pass the texture coordinates through to the pixel shader
output.tex = tex;
// transform the normal to view space; the float3x3 cast drops translation.
// NOTE(review): exact only for rotation/uniform scale — with non-uniform
// scaling use the inverse-transpose of mWorldView instead.
float3 n = mul(normal, (float3x3)mWorldView);
// use n for per-vertex lighting here, or (as below) hand it to the
// pixel shader for per-pixel lighting
output.normalVS = n;
// output
return output;
}

Related

D3D11 - passing blend weights and indices to vertex shader

I'm trying to pass blend weight and indices to my vertex shader as float4s; the struct that holds data for each vertex is as follows in C++:
// CPU-side skinned vertex. Field order and sizes must match the HLSL
// VS_IN struct and the D3D11 input layout element offsets exactly.
struct VertexType_Skin {
XMFLOAT3 position;    // POSITION
XMFLOAT2 texture;     // TEXCOORD0
XMFLOAT3 normal;      // NORMAL
XMFLOAT4 boneIds;     // BLENDINDICES — four bone indices stored as floats
XMFLOAT4 boneWeights; // BLENDWEIGHT — four weights (typically summing to 1)
};
and in the HLSL vertex shader:
// Shader-side vertex input; mirrors VertexType_Skin field-for-field.
struct VS_IN {
float3 position : POSITION;
float2 tex : TEXCOORD0;
float3 normal : NORMAL;
float4 boneIds : BLENDINDICES;    // 4 bone indices packed into a float4
float4 boneWeights : BLENDWEIGHT; // matching 4 blend weights
};
I'm setting up the vertex buffer as follows in C++:
// Default-usage vertex buffer sized for vertexCount skinned vertices.
// NOTE(review): the stride later passed to IASetVertexBuffers must also be
// sizeof(VertexType_Skin), or the GPU reads vertices at the wrong offsets.
D3D11_BUFFER_DESC vertexBufferDesc = { sizeof(VertexType_Skin) * vertexCount, D3D11_USAGE_DEFAULT, D3D11_BIND_VERTEX_BUFFER, 0, 0, 0 };
// Initial data: the CPU vertex array; pitch fields unused for buffers.
vertexData = { skinVertices, 0 , 0 };
device->CreateBuffer(&vertexBufferDesc, &vertexData, &vertexBuffer);
Now I'm not sure why, but doing this doesn't seem to render my mesh at all. Commenting out the float4s in both structs works fine (I'm not using the ids or weights yet, just trying to pass them).
Is there anything obvious I'm missing in this setup?
Thanks!
Nevermind, I figured it out. I don't know if this can ever help anyone because it's an oddly specific problem, but here goes anyway.
I was sending the mesh data like this each frame (because it was in a base class):
// BUG (illustrative — kept as-is): the stride uses the old, smaller vertex
// struct, so every vertex after the first is read at the wrong byte offset.
unsigned int stride = sizeof(VertexType);//<-- this struct only contains position, uvs and normals
unsigned int offset = 0;
deviceContext->IASetVertexBuffers(0, 1, &vertexBuffer, &stride, &offset);
deviceContext->IASetIndexBuffer(indexBuffer, DXGI_FORMAT_R32_UINT, 0);
deviceContext->IASetPrimitiveTopology(top);
So doing this instead solved the issue:
// FIX: stride now matches the struct actually stored in the vertex buffer.
unsigned int stride = sizeof(VertexType_Skin);//<-- the updated struct with bone indices and weights
unsigned int offset = 0;
deviceContext->IASetVertexBuffers(0, 1, &vertexBuffer, &stride, &offset);
deviceContext->IASetIndexBuffer(indexBuffer, DXGI_FORMAT_R32_UINT, 0);
deviceContext->IASetPrimitiveTopology(top);
Ta-dah, easy as pie! Cheers :)

Background face is visible over foreground face in same mesh while using a diffuse shader in DirectX

I am trying to create a simple diffuse shader to paint primitive objects in DirectX 9 and faced the following problem. When I used a DirectX primitive object like a Torus or Teapot, some faces in the foreground part of the mesh are invisible. I don't think this is back-face culling, as I cannot reproduce this behavior for primitive objects like Sphere or Box, where no two quads have the same normal. Following are some screenshots in fill and wire-frame modes.
torus fill-mode
Following is my vertex declaration code.
// Build the D3D9 vertex declaration one element at a time; the same
// 'element' struct is reused and copied into m_vertexElement by push_back.
// NOTE(review): a D3DDECL_END() terminator must be appended before the
// declaration is created — presumably done elsewhere; verify.
// vertex position...
D3DVERTEXELEMENT9 element;
element.Stream = 0;
element.Offset = 0; // position starts at byte 0
element.Type = D3DDECLTYPE_FLOAT3;
element.Method = D3DDECLMETHOD_DEFAULT;
element.Usage = D3DDECLUSAGE_POSITION;
element.UsageIndex = 0;
m_vertexElement.push_back(element);
// vertex normal
element.Stream = 0;
element.Offset = 12; //3 floats * 4 bytes per float
element.Type = D3DDECLTYPE_FLOAT3;
element.Method = D3DDECLMETHOD_DEFAULT;
element.Usage = D3DDECLUSAGE_NORMAL;
element.UsageIndex = 0;
m_vertexElement.push_back(element);
And shader code in development.
// Vertex-shader constants (VS constant register bank).
float4x4 MatWorld : register(c0);
float4x4 MatViewProj : register(c4);
// Pixel-shader constant. No clash with MatWorld: in D3D9 the vertex and
// pixel shaders have separate constant register banks.
float4 matColor : register(c0);

struct VS_INPUT
{
float4 Position : POSITION;
float3 Normal : NORMAL;
};

struct VS_OUTPUT
{
float4 Position : POSITION;
float3 Normal : TEXCOORD0; // world-space normal, interpolated for the PS
};

struct PS_OUTPUT
{
float4 Color : COLOR0;
};

VS_OUTPUT vsmain(in VS_INPUT In)
{
VS_OUTPUT Out;
// position: object -> world -> clip space
float4 wpos = mul(In.Position, MatWorld);
Out.Position = mul(wpos, MatViewProj);
// FIX: cast the matrix to float3x3 before transforming the normal.
// mul(float3, float4x4) only compiles via implicit truncation (fxc
// warning) and produces a float4 that gets truncated again on assignment.
// NOTE(review): for non-uniform scale the inverse-transpose is needed.
Out.Normal = normalize(mul(In.Normal, (float3x3)MatWorld));
return Out;
}

PS_OUTPUT psmain(in VS_OUTPUT In)
{
PS_OUTPUT Out;
float4 ambient = {0.1, 0.0, 0.0, 1.0};
float3 light = {1, 0, 0};
// Lambert diffuse: hard-coded ambient plus clamped N·L toward +X light
Out.Color = ambient + matColor * saturate(dot(light, In.Normal));
return Out;
}
I have also tried setting different render states for Depth-Stencil but wasn't successful.
project files
I figure it out! this is a Depth Buffer(Z-Buffer) issue, you can enable Z-Buffer in your code, either by fixed pipeline or in the shader.
To enable z-buffer in fixed pipeline:
First add the following code when creating the D3D device
// Ask D3D to create and manage a 16-bit depth/stencil surface with the device.
d3dpp.EnableAutoDepthStencil = TRUE ;
d3dpp.AutoDepthStencilFormat = D3DFMT_D16 ;
Then enable z-buffer before drawing
device->SetRenderState(D3DRS_ZENABLE, TRUE) ;
At last, clear z-buffer in render function
device->Clear( 0, NULL, D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER, D3DCOLOR_XRGB(0,0,0), 1.0f, 0 );

how do i create 3d bounding box? (Direct 3d 11, HLSL)

I am using Direct3D for the first time and I am looking for a way to represent bounding boxes (rectangles/cylinders/spheres). How do I outline the bounding box? Is this a shader, or do I use something else to create an outlined shape?
chasester
The simplest way is to use a line list to create a unit wireframe bounding volume.
using a transform matrix, you can scale, translate or rotate the volume around any object in your 3D world.
accomplishing this requires 3 main parts:
A constant VB and IB with the linelist verts set up (remember it needs to be centered around the origin, and have a unit length of 1), plus a VB for per-instance data.
An input layout that takes in a transform matrix as a per instance member.
A vertex shader that applies the transform matrix to each vertex of the cube. (the pixel shader need only output the color as is supplied).
(It should be noted that the same principle applies to spheres, hulls etc. as well)
The shaders I use look like this:
// Wireframe-box vertex input. Position/Color come from the constant
// unit-cube VB; Transform and Size are per-instance members fed through
// the input layout from the instance VB.
struct WIRE_VS_INPUT
{
float3 Position : POSITION;
float4 Color : COLOR0;
float3 Transform : TRANSFORM; // per-instance world translation
float3 Size : SIZE;           // per-instance scale (acts as half-extents)
};
struct WIRE_VS_OUTPUT
{
float4 Position : SV_POSITION;
float4 Color : COLOR0;
};
// Scale and translate one unit-cube corner by the per-instance data,
// then project it to clip space; the vertex colour passes through.
WIRE_VS_OUTPUT WireBoxVS( WIRE_VS_INPUT Input )
{
WIRE_VS_OUTPUT Output;
float3 worldCorner = Input.Position * Input.Size + Input.Transform;
Output.Position = mul(float4(worldCorner, 1), g_mWorldViewProjection);
Output.Color = Input.Color;
return Output;
}
// Pixel shader: emit the interpolated vertex colour unmodified.
float4 WireBoxPS( WIRE_VS_OUTPUT Input ) : SV_TARGET
{
return Input.Color;
}
The cube VB & IB setup:
// The 8 corners of a cube spanning [-1,1] on each axis.
// NOTE(review): edge length is 2, not 1 — the per-instance Size therefore
// behaves as a half-extent; halve the verts if unit edge length is wanted.
const DirectX::XMFLOAT3 kWireCube[] =
{
DirectX::XMFLOAT3(-1,-1,-1),
DirectX::XMFLOAT3(1,-1,-1),
DirectX::XMFLOAT3(1,1,-1),
DirectX::XMFLOAT3(-1,1,-1),
DirectX::XMFLOAT3(-1,-1,1),
DirectX::XMFLOAT3(1,-1,1),
DirectX::XMFLOAT3(1,1,1),
DirectX::XMFLOAT3(-1,1,1)
};
// 12 cube edges as a line list (index pairs).
const WORD kWireCubeIndices[] =
{
// loop around the z = -1 face (verts 0..3)
0,1,
1,2,
2,3,
3,0,
// loop around the z = +1 face (verts 4..7)
4,5,
5,6,
6,7,
7,4,
// the four edges connecting the two faces
0,4,
1,5,
2,6,
3,7
};

HLSL and DX Skybox Issues (creates seams)

I'm (re)learning DirectX and have moved into HLSL coding. Prior to using my custom .fx file I created a skybox for a game with a vertex buffer of quads. Everything worked fine...texture mapped and wrapped beautifully. However now that I have HLSL setup to manage the vertices there are distinctive seams where the quads meet. The textures all line up properly; I just can't get rid of this damn seam!
I tend to think the problem is with the texCube...or rather all the texturing information here. I'm texturing the quads in DX...it may just be that I still don't quite get the link between the two..not sure. Anyway thanks for the help in advance!
Heres the .fx file:
float4x4 World;
float4x4 View;
float4x4 Projection;
float3 CameraPosition;
Texture SkyBoxTexture;

samplerCUBE SkyBoxSampler = sampler_state
{
texture = <SkyBoxTexture>;
minfilter = ANISOTROPIC;
magfilter = LINEAR;
mipfilter = LINEAR;
// FIX: Wrap addressing makes the filter fetch texels from the opposite
// edge of each cube face, which shows up as seams between the skybox
// quads. Clamp keeps the filter inside each face at its borders.
AddressU = Clamp;
AddressV = Clamp;
AddressW = Clamp;
};

struct VertexShaderInput
{
float4 Position : POSITION0;
};

struct VertexShaderOutput
{
float4 Position : POSITION0;
float3 TextureCoordinate : TEXCOORD0; // cubemap lookup direction
};

VertexShaderOutput VertexShaderFunction(VertexShaderInput input)
{
VertexShaderOutput output;
// object -> world -> view -> clip
float4 worldPosition = mul(input.Position, World);
float4 viewPosition = mul(worldPosition, View);
output.Position = mul(viewPosition, Projection);
// The direction from the camera to the vertex is the cubemap lookup
// vector. (The original recomputed mul(input.Position, World) here;
// worldPosition is reused instead.)
output.TextureCoordinate = worldPosition.xyz - CameraPosition;
return output;
}

float4 PixelShaderFunction(VertexShaderOutput input) : COLOR0
{
// Normalize the interpolated direction before sampling the cubemap.
return texCUBE(SkyBoxSampler, normalize(input.TextureCoordinate));
}

technique Skybox
{
pass Pass1
{
VertexShader = compile vs_2_0 VertexShaderFunction();
PixelShader = compile ps_2_0 PixelShaderFunction();
}
}
To avoid seams you need to draw your skybox in a single DrawIndexedPrimitive call, preferably using triangle strip. DON'T draw each face as separate primitive transformed with individual matrix or something like that - you WILL get seams. If you for some unexplainable reason don't want to use single DrawIndexedPrimitive call for skybox parts, then you must ensure that all faces are drawn using same matrix (same world + view + projection matrix used in every call) and same coordinate values for corner vertices - i.e. "top" face should use exactly same vectors (position) for corners that are used by "side" faces.
Another thing is that you should either store skybox as
cubemap (looks like that's what you're doing) - make just 8 vertices for skybox, draw them as indexed primitive.
Or an unwrapped "atlas" texture that has unused areas filled. with border color.
Or - if you're fine with shaders, you could "raytrace" skybox using shader.
You need to clamp the texture coordinates with setsampler state to get rid of the seam. This toymaker page explains this. Toymaker is a great site for learning Direct3D you should check out the tutorials if you have any more trouble.
You may like to draw a skybox using only one quad. Everything you need is an inverse of World*View*Proj matrix, that is (World*View*Proj)^(-1).
The vertices of the quad should be: (1, 1, 1, 1), (1, -1, 1, 1), (-1, 1, 1, 1), (-1, -1, 1, 1).
Then you compute texture coordinates in VS:
// Unproject the full-screen-quad corner back to world space; the divide
// by w completes the inverse projection, yielding the view direction.
float4 pos = mul(vPos, WorldViewProjMatrixInv);
float3 tex_coord = pos.xyz / pos.w;
And finally you sample the texture in PS:
float4 color = texCUBE(sampler, tex_coord);
No worry about any seams! :)

Shader and opengl transformations

When I add shaders (in Cg) to my OpenGL program, all the local transformations (glRotatef, glTranslatef and glScalef between glPushMatrix and glPopMatrix) stop working. Transforms outside push/pop still work, though. So what might be the problem here?
update:
I have a rotating cube in the center of the scene:
// Local transform scoped to the box: the rotation is removed from the
// modelview stack again by glPopMatrix.
glPushMatrix();
glRotatef(angle, 1, 0, 0);
drawBox();
glPopMatrix();
and after that i send worldview and worldviewprojection matrices to shader:
// Copy GL's *current* matrix stacks into the Cg shader parameters.
// NOTE(review): if this runs after glPopMatrix, the captured modelview no
// longer contains the local rotation — capture while the transform is
// still on the stack (or pass the per-object matrix explicitly).
cgGLSetStateMatrixParameter(
myCgVertexParam_modelViewProj,
CG_GL_MODELVIEW_PROJECTION_MATRIX,
CG_GL_MATRIX_IDENTITY
);
cgGLSetStateMatrixParameter(
myCgVertexParam_modelView,
CG_GL_MODELVIEW_MATRIX,
CG_GL_MATRIX_IDENTITY
);
Vertex shader code:
// Cg vertex shader: projects the vertex and computes a per-vertex,
// distance-based fog exponent for a log2 fog model.
void C9E2v_fog(float4 position : POSITION,
float4 color : COLOR,
out float4 oPosition : POSITION,
out float4 oColor : COLOR,
out float fogExponent : TEXCOORD1,
uniform float fogDensity, // Based on log2
uniform float4x4 modelViewProj : MODELVIEW_PROJECTION_MATRIX,
uniform float4x4 modelView : MODELVIEW_MATRIX)
{
// Assume nonprojective modelview matrix
float3 eyePosition = mul(modelView, position).xyz; // eye-space position
float fogDistance = length(eyePosition);           // distance from the camera
fogExponent = fogDistance * fogDensity;
oPosition = mul(modelViewProj, position); // clip-space position
//oDecalCoords = decalCoords;
oColor = color; // pass the vertex colour through
}
So in the end the cube doesn't rotate, but if I write (no push/pop)
// Same draw without push/pop: the rotation stays on the modelview stack,
// so the later Cg matrix capture still includes it.
glRotatef(angle, 1, 0, 0);
drawBox();
everything works fine. How do i fix that?
You can use either fixed function pipeline or programmable one. Since you switched to shaders, fixed function pipeline "stopped working". To switch back you need to glUseProgram(0). And you need to send those local transformations to the shader.