Hi, I am just learning DirectX 11 and have come across a problem with PSSetShaderResources.
If I change textures before the call to swapchain->Present, only the first texture is displayed.
If I change textures between Present calls, both are displayed, but on consecutive frames.
Is there any way of changing textures with PSSetShaderResources so I can use both (or more) on a single frame?
I know I can use texture arrays, but it appears to me that all the textures must be the same size?
I could also upload two (or more) textures at a time, but I would then have to have conditional statements within the shader.
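To illustrate what I mean by conditionals in the shader, here is a rough sketch (texIndex would be a hypothetical extra value in the constant buffer, the two declarations would replace the single txDiffuse, and both textures would be bound at once with one PSSetShaderResources(0, 2, pTex) call):

// Rough sketch only -- texIndex is a hypothetical extra constant-buffer value.
Texture2D txDiffuse0 : register(t0);
Texture2D txDiffuse1 : register(t1);

// Inside the pixel shader:
float4 diffuse = (texIndex == 0) ? txDiffuse0.Sample(samLinear, Tex)
                                 : txDiffuse1.Sample(samLinear, Tex);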
Below is the drawing loop I am using, along with the simple shader that goes with it.
Any help would be appreciated.
Paul
24OCT2014
tempBool = false;
for (int j = 0; j < 2; j++) // change 2 to texCount
{
    devcon->UpdateSubresource(pConstantBuffer, 0, NULL, &cb, 0, 0);
    devcon->VSSetConstantBuffers(0, 1, &pConstantBuffer);
    devcon->PSSetConstantBuffers(0, 1, &pConstantBuffer);

    tempBool = !tempBool;
    if (tempBool)
    {
        devcon->PSSetShaderResources(0, 1, &pTex[0]);
    }
    else
    {
        devcon->PSSetShaderResources(0, 1, &pTex[1]);
    }

    for (int i = 0; i < texRun[j]; i++)
    {
        devcon->Draw(obLens[curPos + i], obStarts[curPos + i]);
    }
    curPos += texRun[j];
}
swapchain->Present(0, 0);
Texture2D txDiffuse : register(t0);
SamplerState samLinear : register(s0);

cbuffer ConstantBuffer : register(b0)
{
    matrix World;
    matrix View;
    matrix Projection;
    float4 vLightDir;
    float4 vLightColor;
};

struct VOut
{
    float4 Pos : SV_POSITION;
    float3 Norm : NORMAL;
    float2 Tex : TEXCOORD;
};

VOut VShader(float4 position : POSITION, float3 Norm : NORMAL, float2 Tex : TEXCOORD)
{
    VOut output = (VOut)0;
    output.Pos = mul(position, World);
    output.Pos = mul(output.Pos, View);
    output.Pos = mul(output.Pos, Projection);
    output.Norm = mul(Norm, World);
    output.Tex = Tex;
    return output;
}

float4 PShader0(float4 position : SV_POSITION, float3 Norm : NORMAL,
                float2 Tex : TEXCOORD) : SV_TARGET
{
    float4 diffuse = txDiffuse.Sample(samLinear, Tex);
    float4 finalColor = diffuse; // * 0.2;
    finalColor += saturate(dot((float3)vLightDir, Norm) * diffuse);
    finalColor.a = 1.0;
    return finalColor;
}
Finally tracked down the error: it was in my object loading code and had nothing to do with Direct3D.
I was loading the second lot of objects over the first lot.
For the first lot of objects I read into verts, which is my vertex data pointer.
For the second lot I was also reading into verts instead of &verts[firstVertsCount].
Anyway, thanks for the help.
Paul
Related
Some 3D meshes exported to the Wavefront .obj format come with a .mtl file that holds additional data about the textures and materials they use. When exported from Blender they always include Ambient, Diffuse, Specular, and Emissive RGB values as part of the material, but I'm not sure how to use this data in the pixel shader to get the right color output.
I would appreciate it if anyone could explain how to use these materials; any code sample would be very welcome.
Traditional materials and lighting models use "Ambient", "Diffuse", "Specular", and "Emissive" colors, which is why you find those in Wavefront OBJ files. These can often be replaced by, or used in multiplicative conjunction with, texture colors.
The (now defunct) XNA Game Studio product did a good job of providing simple 'classic' shaders in the BasicEffect "Stock Shaders". I use them in the DirectX Tool Kit for DX11 and DX12.
Take a look at BasicEffect.fx for a traditional material pixel shader. If you are looking mostly for pixel-shader handling, that's "per-pixel lighting", as opposed to the "vertex lighting" that was more common back when GPUs were less powerful.
Here's an 'inlined' version so you can follow it all in one place:
struct VSInputNmTx
{
    float4 Position : SV_Position;
    float3 Normal : NORMAL;
    float2 TexCoord : TEXCOORD0;
};

Texture2D<float4> Texture : register(t0);
sampler Sampler : register(s0);

cbuffer Parameters : register(b0)
{
    float4 DiffuseColor             : packoffset(c0);
    float3 EmissiveColor            : packoffset(c1);
    float3 SpecularColor            : packoffset(c2);
    float  SpecularPower            : packoffset(c2.w);

    float3 LightDirection[3]        : packoffset(c3);
    float3 LightDiffuseColor[3]     : packoffset(c6);
    float3 LightSpecularColor[3]    : packoffset(c9);

    float3 EyePosition              : packoffset(c12);

    float3 FogColor                 : packoffset(c13);
    float4 FogVector                : packoffset(c14);

    float4x4 World                  : packoffset(c15);
    float3x3 WorldInverseTranspose  : packoffset(c19);
    float4x4 WorldViewProj          : packoffset(c22);
};

struct VSOutputPixelLightingTx
{
    float2 TexCoord   : TEXCOORD0;
    float4 PositionWS : TEXCOORD1;
    float3 NormalWS   : TEXCOORD2;
    float4 Diffuse    : COLOR0;
    float4 PositionPS : SV_Position;
};

// Vertex shader: pixel lighting + texture.
VSOutputPixelLightingTx VSBasicPixelLightingTx(VSInputNmTx vin)
{
    VSOutputPixelLightingTx vout;

    vout.PositionPS = mul(vin.Position, WorldViewProj);
    vout.PositionWS.xyz = mul(vin.Position, World).xyz;

    // ComputeFogFactor
    vout.PositionWS.w = saturate(dot(vin.Position, FogVector));

    vout.NormalWS = normalize(mul(vin.Normal, WorldInverseTranspose));
    vout.Diffuse = float4(1, 1, 1, DiffuseColor.a);
    vout.TexCoord = vin.TexCoord;

    return vout;
}
struct PSInputPixelLightingTx
{
    float2 TexCoord   : TEXCOORD0;
    float4 PositionWS : TEXCOORD1;
    float3 NormalWS   : TEXCOORD2;
    float4 Diffuse    : COLOR0;
};

// Pixel shader: pixel lighting + texture.
float4 PSBasicPixelLightingTx(PSInputPixelLightingTx pin) : SV_Target0
{
    float4 color = Texture.Sample(Sampler, pin.TexCoord) * pin.Diffuse;

    float3 eyeVector = normalize(EyePosition - pin.PositionWS.xyz);
    float3 worldNormal = normalize(pin.NormalWS);

    ColorPair lightResult = ComputeLights(eyeVector, worldNormal, 3);

    color.rgb *= lightResult.Diffuse;

    // AddSpecular
    color.rgb += lightResult.Specular * color.a;

    // ApplyFog (we passed the fog factor in via PositionWS.w)
    color.rgb = lerp(color.rgb, FogColor * color.a, pin.PositionWS.w);

    return color;
}
Here is the helper function ComputeLights, which implements a Blinn-Phong reflection model for the specular highlight.
struct ColorPair
{
    float3 Diffuse;
    float3 Specular;
};

ColorPair ComputeLights(float3 eyeVector, float3 worldNormal, uniform int numLights)
{
    float3x3 lightDirections = 0;
    float3x3 lightDiffuse = 0;
    float3x3 lightSpecular = 0;
    float3x3 halfVectors = 0;

    [unroll]
    for (int i = 0; i < numLights; i++)
    {
        lightDirections[i] = LightDirection[i];
        lightDiffuse[i]    = LightDiffuseColor[i];
        lightSpecular[i]   = LightSpecularColor[i];

        halfVectors[i] = normalize(eyeVector - lightDirections[i]);
    }

    float3 dotL = mul(-lightDirections, worldNormal);
    float3 dotH = mul(halfVectors, worldNormal);

    float3 zeroL = step(0, dotL);

    float3 diffuse  = zeroL * dotL;
    float3 specular = pow(max(dotH, 0) * zeroL, SpecularPower) * dotL;

    ColorPair result;

    result.Diffuse  = mul(diffuse, lightDiffuse) * DiffuseColor.rgb + EmissiveColor;
    result.Specular = mul(specular, lightSpecular) * SpecularColor;

    return result;
}
These BasicEffect shaders don't make use of ambient color, but you could modify them to do so if you wanted. All ambient color does is provide a 'minimum color value' that's independent of dynamic lights.
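For example, one minimal way to do that, assuming you add a hypothetical float3 AmbientColor field to the constant buffer above, is to fold it into the diffuse term that ComputeLights returns:

// Sketch only: AmbientColor is an assumed extra cbuffer field, not part of the stock shader.
result.Diffuse = mul(diffuse, lightDiffuse) * DiffuseColor.rgb
               + AmbientColor * DiffuseColor.rgb   // light-independent color floor
               + EmissiveColor;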
Note that there are also some unofficial Physically-Based Rendering (PBR) material extensions in some Wavefront OBJ files; see Extending Wavefront MTL for Physically-Based. More modern geometry formats like glTF assume PBR material properties, i.e. things like an albedo texture, normal texture, roughness/metalness texture, etc.
I'm working on a graphics program in DirectX 11 which takes a heightmap, manipulates a plane based on the heightmap, calculates the normals, and lights the scene with a directional light. I'm trying to get shadow mapping to work with the directional light, but whenever I do the depth test in the pixel shader it always returns 1 instead of the expected value.
Here's my main pixel shader:
Texture2D t0 : register(t0);
Texture2D t1 : register(t1);
SamplerState s0 : register(s0);
SamplerState s1 : register(s1);

cbuffer LightBuffer : register(b0)
{
    float4 diffuseColour;
    float3 lightDirection;
    float padding;
}

struct InputType
{
    float4 position : SV_POSITION;
    float4 tex : TEXCOORD0;
    float3 normal : NORMAL;
    float4 lightViewPos : TEXCOORD1;
};

float4 calculateLighting(float3 lightDirection, float3 normal, float4 diffuse)
{
    float intensity = saturate(dot(normal, lightDirection));
    float4 colour = saturate(diffuse * intensity);
    return colour;
}

float4 main(InputType input) : SV_TARGET
{
    float depthValue;
    float lightDepthValue;
    float shadowMapBias = 0.005f;
    float4 colour = float4(0.f, 0.f, 0.f, 1);
    float4 textureColour = t0.Sample(s0, input.tex.xy);

    float2 pTexCoord = input.lightViewPos.xy / input.lightViewPos.w;
    pTexCoord *= float2(0.5, -0.5);
    pTexCoord += float2(0.5, 0.5);

    if (pTexCoord.x < 0.0f || pTexCoord.x > 1.0f ||
        pTexCoord.y < 0.0f || pTexCoord.y > 1.0f)
    {
        return textureColour;
    }

    depthValue = t1.Sample(s1, pTexCoord).r;

    lightDepthValue = input.lightViewPos.z / input.lightViewPos.w;
    lightDepthValue -= shadowMapBias;

    if (lightDepthValue < depthValue)
    {
        //colour = float4(1, 1, 1, 1);
        colour = calculateLighting(-lightDirection, input.normal, diffuseColour);
    }

    // This is a test to see what the depth value outputs as
    if (depthValue == 0)
        return float4(1, 0, 1, 1);
    else if (depthValue == 1)
        return float4(1, 1, 1, 1); // Always returns this
    else
        return float4(0, 1, 0, 1);

    //return float4(depthValue, depthValue, depthValue, 1.0f);
    //return saturate(colour * textureColour);
}
My depth vertex shader:
Texture2D t0 : register(t0);
SamplerState s0 : register(s0);

cbuffer MatrixBuffer : register(b0)
{
    matrix worldMatrix;
    matrix viewMatrix;
    matrix projectionMatrix;
}

struct InputType
{
    float4 position : POSITION;
    float2 tex : TEXCOORD0;
};

struct OutputType
{
    float4 position : SV_POSITION;
    float4 depthPosition : TEXCOORD0;
};

OutputType main(InputType input)
{
    OutputType output;

    output.position = mul(input.position, worldMatrix);

    // Displace the plane vertically by the heightmap.
    if (t0.SampleLevel(s0, input.tex, 0).r > 0.05f)
        output.position.y += (t0.SampleLevel(s0, input.tex, 0).r * 50.0f);

    output.position = mul(output.position, viewMatrix);
    output.position = mul(output.position, projectionMatrix);

    output.depthPosition = output.position;

    return output;
}
And my depth pixel shader:
struct InputType
{
    float4 position : SV_POSITION;
    float4 depthPosition : TEXCOORD0;
};

float4 main(InputType input) : SV_Target
{
    float depthValue = input.depthPosition.z / input.depthPosition.w;
    return float4(depthValue, depthValue, depthValue, 1.0f);
}
I know the depth data is being written to the render texture correctly, as I have a preview of it on screen (the current view with normals shown to demonstrate the topography, rather than a flat white plane).
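For context, input.lightViewPos in the main pixel shader is produced by the corresponding vertex shader, which is not shown above. A typical way to compute it looks like the sketch below; lightViewMatrix and lightProjectionMatrix are assumed names for the directional light's view and orthographic projection matrices, not necessarily the ones used in the project.

// Sketch of the light-space transform that feeds TEXCOORD1; matrix names are assumptions.
output.lightViewPos = mul(input.position, worldMatrix);
output.lightViewPos = mul(output.lightViewPos, lightViewMatrix);
output.lightViewPos = mul(output.lightViewPos, lightProjectionMatrix);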
I'm new to DirectX and I need to set up opacity in my shaders. I don't use textures or anything; I just need to add opacity to whole objects based on one variable in the shader.
I've tried simply putting this variable in the shader's output color alpha channel, but it didn't work, and I guess the solution to this problem is not in the shaders. This is my shader so far.
cbuffer ConstantBuffer : register(b0)
{
    matrix World;
    matrix View;
    matrix Projection;
    float4 vLigthDir[2];
    float4 vLigthColor[2];
    float4 vOutputColor;
    float intensity;
    float3 colors;
}

struct VS_INPUT
{
    float4 pos : POSITION;
    float3 norm : NORMAL;
};

struct PS_INPUT
{
    float4 pos : SV_POSITION;
    float3 norm : TEXCOORD0;
};

PS_INPUT VS(VS_INPUT input)
{
    PS_INPUT output = (PS_INPUT) 0;
    output.pos = mul(input.pos, World);
    output.pos = mul(output.pos, View);
    output.pos = mul(output.pos, Projection);
    output.norm = mul(input.norm, World);
    return output;
}

float4 PS(PS_INPUT input) : SV_Target
{
    float4 finalColor = 0;
    finalColor.rgb = colors;
    finalColor.a = intensity;
    return finalColor;
}

float4 PSSolid(PS_INPUT input) : SV_Target
{
    return vOutputColor;
}
// Create BlendState
D3D11_BLEND_DESC blendDesc = {};
blendDesc.AlphaToCoverageEnable = false;
blendDesc.IndependentBlendEnable = false;
blendDesc.RenderTarget[0].BlendEnable = true;
blendDesc.RenderTarget[0].SrcBlend = D3D11_BLEND_SRC_ALPHA;
blendDesc.RenderTarget[0].DestBlend = D3D11_BLEND_INV_SRC_ALPHA;
blendDesc.RenderTarget[0].BlendOp = D3D11_BLEND_OP_ADD;
blendDesc.RenderTarget[0].SrcBlendAlpha = D3D11_BLEND_ONE;
blendDesc.RenderTarget[0].DestBlendAlpha = D3D11_BLEND_ZERO;
blendDesc.RenderTarget[0].BlendOpAlpha = D3D11_BLEND_OP_ADD;
blendDesc.RenderTarget[0].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL;
hr = g_pd3dDevice->CreateBlendState(&blendDesc, &pBlendState);

// Using BlendState (OMSetBlendState binds it; OMGetBlendState only queries the current state)
float blendFactor[4] = { 1.0f, 1.0f, 1.0f, 1.0f };
UINT sampleMask = 0xffffffff;
g_pImmediateContext->OMSetBlendState(pBlendState, blendFactor, sampleMask);

/////////// Rendering geometry part ///////////

g_pImmediateContext->OMSetBlendState(NULL, NULL, 0xffffffff);
I expect some objects to have opacity, but instead nothing happens at all; they are just drawn in a solid color.
I am trying to create a simple diffuse shader to paint primitive objects in DirectX 9 and have faced the following problem. When I use a DirectX primitive object like a Torus or Teapot, some faces in the foreground part of the mesh are invisible. I don't think this is a back-face culling issue, as I cannot reproduce the behavior for primitive objects like the Sphere or Box, where no two quads have the same normal. Following are some screenshots in fill and wire-frame modes.
(Screenshot: torus in fill mode.)
Following is my vertex declaration code.
// vertex position...
D3DVERTEXELEMENT9 element;
element.Stream = 0;
element.Offset = 0;
element.Type = D3DDECLTYPE_FLOAT3;
element.Method = D3DDECLMETHOD_DEFAULT;
element.Usage = D3DDECLUSAGE_POSITION;
element.UsageIndex = 0;
m_vertexElement.push_back(element);
// vertex normal
element.Stream = 0;
element.Offset = 12; //3 floats * 4 bytes per float
element.Type = D3DDECLTYPE_FLOAT3;
element.Method = D3DDECLMETHOD_DEFAULT;
element.Usage = D3DDECLUSAGE_NORMAL;
element.UsageIndex = 0;
m_vertexElement.push_back(element);
And the shader code, still in development:
float4x4 MatWorld : register(c0);
float4x4 MatViewProj : register(c4);
float4 matColor : register(c0);

struct VS_INPUT
{
    float4 Position : POSITION;
    float3 Normal : NORMAL;
};

struct VS_OUTPUT
{
    float4 Position : POSITION;
    float3 Normal : TEXCOORD0;
};

struct PS_OUTPUT
{
    float4 Color : COLOR0;
};

VS_OUTPUT vsmain(in VS_INPUT In)
{
    VS_OUTPUT Out;
    float4 wpos = mul(In.Position, MatWorld);
    Out.Position = mul(wpos, MatViewProj);
    Out.Normal = normalize(mul(In.Normal, MatWorld));
    return Out;
}

PS_OUTPUT psmain(in VS_OUTPUT In)
{
    PS_OUTPUT Out;
    float4 ambient = { 0.1, 0.0, 0.0, 1.0 };
    float3 light = { 1, 0, 0 };
    Out.Color = ambient + matColor * saturate(dot(light, In.Normal));
    return Out;
}
I have also tried setting different render states for Depth-Stencil but wasn't successful.
project files
I figured it out! This is a depth buffer (Z-buffer) issue. You can enable the Z-buffer in your code, either through the fixed pipeline or in the shader.
To enable the Z-buffer in the fixed pipeline:
First, add the following when creating the D3D device:
d3dpp.EnableAutoDepthStencil = TRUE;
d3dpp.AutoDepthStencilFormat = D3DFMT_D16;
Then enable the Z-buffer before drawing:
device->SetRenderState(D3DRS_ZENABLE, TRUE);
Finally, clear the Z-buffer in the render function:
device->Clear(0, NULL, D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER, D3DCOLOR_XRGB(0, 0, 0), 1.0f, 0);
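If you render through a D3DX effect (.fx) file instead of setting the render state from C++, the equivalent is to set the depth states inside the technique pass. A minimal sketch (the technique name is just an example; vsmain/psmain are the shaders from the question):

technique DiffuseTech
{
    pass P0
    {
        ZEnable = true;        // enable depth testing
        ZWriteEnable = true;   // write depth values
        vertexShader = compile vs_2_0 vsmain();
        pixelShader  = compile ps_2_0 psmain();
    }
}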
I'm trying to create a Cube Map in DirectX 9, but for some reason it's not working. I've used the DirectX Texture Utility to create a .dds texture file for the cube, but when I draw it, only a solid color shows up. Here's the code I have:
SkyBox.h
#pragma once
#include <D3DX9Mesh.h>
#include "DirectX.h"

class SkyBox
{
public:
    SkyBox(LPCSTR textureFile);
    ~SkyBox();
    void Draw();

protected:
    IDirect3DCubeTexture9* texture;
    LPD3DXMESH mesh;
};
SkyBox.cpp
#include"SkyBox.h"
SkyBox::SkyBox(LPCSTR textureFile)
{
D3DXCreateBox(DirectX::device, 1.0f, 1.0f, 1.0f, &mesh, NULL);
D3DXCreateCubeTextureFromFile(DirectX::device, textureFile, &texture);
}
SkyBox::~SkyBox()
{
mesh->Release();
texture->Release();
}
void SkyBox::Draw()
{
D3DXHANDLE textureHandle = DirectX::currentShaderEffect->GetParameterByName(0, "tex0");
DirectX::currentShaderEffect->SetTexture(textureHandle, texture);
DirectX::currentShaderEffect->CommitChanges();
UINT passNum = 5;
DirectX::currentShaderEffect->Begin(&passNum, 0);
DirectX::currentShaderEffect->BeginPass(5);
mesh->DrawSubset(0);
DirectX::currentShaderEffect->EndPass();
DirectX::currentShaderEffect->End();
}
And this is my shader for the Cube Map:
uniform extern float4x4 mvp;
uniform extern texture tex0;

struct SkyboxVS
{
    float4 pos : POSITION0;
    float3 uv0 : TEXCOORD0;
};

sampler SkyBoxTex = sampler_state
{
    Texture = <tex0>;
    MinFilter = LINEAR;
    MagFilter = LINEAR;
    MipFilter = LINEAR;
    AddressU = WRAP;
    AddressV = WRAP;
};

SkyboxVS VertexSkybox(float3 position : POSITION0, float3 texCoord : TEXCOORD0)
{
    SkyboxVS skyVS = (SkyboxVS)0;
    skyVS.pos = mul(float4(position, 1.0f), mvp);
    skyVS.uv0 = texCoord;
    return skyVS;
}

float4 PixelSkybox(float3 texCoord : TEXCOORD0) : COLOR
{
    float4 color = texCUBE(SkyBoxTex, texCoord);
    return color;
}

technique TransformTech
{
    pass P5
    {
        vertexShader = compile vs_2_0 VertexSkybox();
        pixelShader = compile ps_2_0 PixelSkybox();

        ZFunc = Always;
        StencilEnable = true;
        StencilFunc = Always;
        StencilPass = Replace;
        StencilRef = 0;
    }
}
Here's some sample code:
Sky::Sky(const std::string& envmapFilename, float skyRadius)
    : mRadius(skyRadius)
{
    HR(D3DXCreateSphere(gd3dDevice, skyRadius, 30, 30, &mSphere, 0));
    HR(D3DXCreateCubeTextureFromFile(gd3dDevice, envmapFilename.c_str(), &mEnvMap));

    ID3DXBuffer* errors = 0;
    HR(D3DXCreateEffectFromFile(gd3dDevice, "sky.fx", 0, 0, 0, 0, &mFX, &errors));
    if (errors)
        MessageBox(0, (char*)errors->GetBufferPointer(), 0, 0);

    mhTech   = mFX->GetTechniqueByName("SkyTech");
    mhWVP    = mFX->GetParameterByName(0, "gWVP");
    mhEnvMap = mFX->GetParameterByName(0, "gEnvMap");

    // Set effect parameters that do not vary.
    HR(mFX->SetTechnique(mhTech));
    HR(mFX->SetTexture(mhEnvMap, mEnvMap));
}

void Sky::draw()
{
    // Sky always centered about camera's position.
    D3DXMATRIX W;
    D3DXVECTOR3 p = gCamera->pos();
    D3DXMatrixTranslation(&W, p.x, p.y, p.z);
    HR(mFX->SetMatrix(mhWVP, &(W * gCamera->viewProj())));

    UINT numPasses = 0;
    HR(mFX->Begin(&numPasses, 0));
    HR(mFX->BeginPass(0));
    HR(mSphere->DrawSubset(0));
    HR(mFX->EndPass());
    HR(mFX->End());
}
And shader code:
OutputVS EnvMapVS(float3 posL : POSITION0, float3 normalL : NORMAL0, float2 tex0 : TEXCOORD0)
{
    // Zero out our output.
    OutputVS outVS = (OutputVS)0;

    // Transform normal to world space.
    outVS.normalW = mul(float4(normalL, 0.0f), gWorldInvTrans).xyz;

    // Transform vertex position to world space.
    float3 posW = mul(float4(posL, 1.0f), gWorld).xyz;

    // Compute the unit vector from the vertex to the eye.
    outVS.toEyeW = gEyePosW - posW;

    // Transform to homogeneous clip space.
    outVS.posH = mul(float4(posL, 1.0f), gWVP);

    // Pass on texture coordinates to be interpolated in rasterization.
    outVS.tex0 = tex0;

    // Done--return the output.
    return outVS;
}

float4 EnvMapPS(float3 normalW : TEXCOORD0,
                float3 toEyeW : TEXCOORD1,
                float2 tex0 : TEXCOORD2) : COLOR
{
    // Interpolated normals can become unnormal--so normalize.
    normalW = normalize(normalW);
    toEyeW = normalize(toEyeW);

    // Light vector is opposite the direction of the light.
    float3 lightVecW = -gLight.dirW;

    // Compute the reflection vector.
    float3 r = reflect(-lightVecW, normalW);

    // Determine how much (if any) specular light makes it into the eye.
    float t = pow(max(dot(r, toEyeW), 0.0f), gMtrl.specPower);

    // Determine the diffuse light intensity that strikes the vertex.
    float s = max(dot(lightVecW, normalW), 0.0f);

    // Get the texture color.
    float4 texColor = tex2D(TexS, tex0);

    // Get the reflected color.
    float3 envMapTex = reflect(-toEyeW, normalW);
    float3 reflectedColor = texCUBE(EnvMapS, envMapTex);

    // Weighted average between the reflected color, and usual
    // diffuse/ambient material color modulated with the texture color.
    float3 ambientMtrl = gReflectivity * reflectedColor + (1.0f - gReflectivity) * (gMtrl.ambient * texColor);
    float3 diffuseMtrl = gReflectivity * reflectedColor + (1.0f - gReflectivity) * (gMtrl.diffuse * texColor);

    // Compute the ambient, diffuse and specular terms separately.
    float3 spec = t * (gMtrl.spec * gLight.spec).rgb;
    float3 diffuse = s * (diffuseMtrl * gLight.diffuse.rgb);
    float3 ambient = ambientMtrl * gLight.ambient;

    float3 final = ambient + diffuse + spec;

    // Output the color and the alpha.
    return float4(final, gMtrl.diffuse.a * texColor.a);
}