Using shader resources in HLSL (Port DX9 -> DX10)

I'm trying to port my DX9 volume renderer to a DX10 version. Currently, I'm stuck at the following error:
D3D10: ERROR: ID3D10Device::DrawIndexed: The view dimension declared in the shader code does not match the view type bound to slot 0 of the Pixel Shader unit. This is invalid if the shader actually uses the view (e.g. it is not skipped due to shader code branching). [ EXECUTION ERROR #354: DEVICE_DRAW_VIEW_DIMENSION_MISMATCH ]
My guess is that I'm not sending the 2D and/or 3D textures (shader resources) to the shader in the correct way, or that I'm not using them in the correct (DX10) way. The DX9 code was something like the following (simplified for the sake of this question):
HRESULT hr;
int nVertexShaderIndex = 0;
// Setup the 2D Dependent Lookup Texture
hr = m_pDevice->SetTexture(0, lookupTexture); // lookupTexture is a LPDIRECT3DTEXTURE9
if (hr != D3D_OK) {
//handle error
}
m_pDevice->SetSamplerState(0, D3DSAMP_ADDRESSU, D3DTADDRESS_CLAMP);
m_pDevice->SetSamplerState(0, D3DSAMP_ADDRESSV, D3DTADDRESS_CLAMP);
m_pDevice->SetSamplerState(0, D3DSAMP_MAGFILTER, D3DTEXF_POINT);
m_pDevice->SetSamplerState(0, D3DSAMP_MINFILTER, D3DTEXF_POINT);
// Maximum Intensity
m_pDevice->SetRenderState( D3DRS_ALPHABLENDENABLE, TRUE); // Enable Alpha blend
m_pDevice->SetRenderState( D3DRS_SRCBLEND, D3DBLEND_ONE); // 1 * SRC color
m_pDevice->SetRenderState( D3DRS_DESTBLEND, D3DBLEND_ONE); // 1 * DST color
m_pDevice->SetRenderState( D3DRS_BLENDOP, D3DBLENDOP_MAX); // MAX blend
m_pDevice->SetRenderState( D3DRS_ZENABLE, D3DZB_FALSE ); // Disable Z
A 3D volume texture with the actual data is sent in a similar manner. The corresponding pixel shader code:
PS_OUTPUT Main(VS_OUTPUT vsIn,
uniform sampler2D lookupTexture : TEXUNIT0,
uniform sampler3D dataTexture : TEXUNIT1)
{
PS_OUTPUT psOut;
float dataValue;
psOut.color = SampleWith2DLookup(vsIn.TexCoord0,
lookupTexture,
dataTexture,
dataValue);
return psOut;
}
float4 LookupIn2DTexture(float value,
uniform sampler2D lookupTexture)
{
float2 lutCoord;
float4 outColor;
// Build a 2D Coordinate for lookup
lutCoord[0] = value;
lutCoord[1] = 0.0f;
outColor = tex2D(lookupTexture, lutCoord);
return(outColor);
}
float4 SampleWith2DLookup(const float3 TexCoord,
uniform sampler2D lookupTexture,
uniform sampler3D dataTexture,
out float dataValue)
{
float value;
float4 outputColor;
value = Sample(TexCoord, dataTexture);
outputColor = LookupIn2DTexture(value, lookupTexture);
dataValue = value;
return(outputColor);
}
In DX10 we can simplify some of the shader code (as far as I understand). I create an empty texture and fill it with Map()/Unmap(). Next I bind it as a shader resource to my pixel shader. The C++ and shader code become the following:
// CREATE THE EMPTY TEXTURE
D3D10_TEXTURE2D_DESC desc;
ZeroMemory(&desc, sizeof(desc));
desc.Width = 4096;
desc.Height = 1;
desc.ArraySize = 1;
desc.MipLevels = 1;
desc.Format = GetHardwareResourceFormatDX10();
desc.Usage = D3D10_USAGE_DYNAMIC;
desc.BindFlags = D3D10_BIND_SHADER_RESOURCE;
desc.CPUAccessFlags = D3D10_CPU_ACCESS_WRITE;
desc.SampleDesc.Count = 1;
hr = m_pDeviceDX10->CreateTexture2D(&desc, NULL, &lookupTexture);
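The Map()/Unmap() fill mentioned above is not shown; a minimal sketch of it (assuming a 4-byte-per-texel format such as DXGI_FORMAT_R8G8B8A8_UNORM and a hypothetical lookupData source buffer) could look like this:
D3D10_MAPPED_TEXTURE2D mapped;
hr = lookupTexture->Map(0, D3D10_MAP_WRITE_DISCARD, 0, &mapped); // subresource 0: single mip, single array slice
if (SUCCEEDED(hr)) {
    // one row of 4096 texels, 4 bytes each; use mapped.RowPitch when copying multiple rows
    memcpy(mapped.pData, lookupData, 4096 * 4);
    lookupTexture->Unmap(0);
}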
Bind to shader:
// SEND TO SHADER
ID3D10ShaderResourceView* pTexDepSurface = NULL;
D3D10_SHADER_RESOURCE_VIEW_DESC srvDesc;
D3D10_TEXTURE2D_DESC desc;
pTexDep->GetDesc( &desc );
srvDesc.Format = desc.Format;
srvDesc.ViewDimension = D3D10_SRV_DIMENSION_TEXTURE2D;
srvDesc.Texture2D.MipLevels = desc.MipLevels;
srvDesc.Texture2D.MostDetailedMip = desc.MipLevels -1;
hr = m_pDeviceDX10->CreateShaderResourceView(pTexDep, &srvDesc, &pTexDepSurface);
if (FAILED(hr)) {
//handle here
}
m_pDeviceDX10->PSSetShaderResources(0,1, &pTexDepSurface);
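One thing the old SetSamplerState() calls no longer cover: in DX10 a sampler object has to be created and bound separately, otherwise the device falls back to a default sampler for the SamplerState slots the shader uses. A sketch matching the old CLAMP/POINT settings (not part of the original code) would be:
D3D10_SAMPLER_DESC sampDesc;
ZeroMemory(&sampDesc, sizeof(sampDesc));
sampDesc.Filter = D3D10_FILTER_MIN_MAG_MIP_POINT;
sampDesc.AddressU = D3D10_TEXTURE_ADDRESS_CLAMP;
sampDesc.AddressV = D3D10_TEXTURE_ADDRESS_CLAMP;
sampDesc.AddressW = D3D10_TEXTURE_ADDRESS_CLAMP;
sampDesc.MaxLOD = D3D10_FLOAT32_MAX;
ID3D10SamplerState* pLookupSampler = NULL;
hr = m_pDeviceDX10->CreateSamplerState(&sampDesc, &pLookupSampler);
if (SUCCEEDED(hr)) {
    m_pDeviceDX10->PSSetSamplers(0, 1, &pLookupSampler); // slot s0, matching LookupSampler below
}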
Use in shader:
Texture2D LookupTexture : register(t0);
SamplerState LookupSampler : register(s0);
Texture2D VolumeTexture : register(t1);
SamplerState VolumeSampler : register(s1);
PS_OUTPUT Main(VS_OUTPUT vsIn,
uniform sampler2D lookupTexture : TEXUNIT0,
uniform sampler3D dataTexture : TEXUNIT1)
{
PS_OUTPUT psOut;
float dataValue;
dataValue = VolumeTexture.Sample(VolumeSampler,vsIn.TexCoord0);
psOut.color = LookupTexture.Sample(LookupSampler,dataValue);
return psOut;
}
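Not related to the view-dimension error, but worth noting for the port: the fixed-function blend and Z render states from the DX9 snippet also become state objects in DX10. A sketch of the MAX-blend equivalent (assumed, not taken from the original code):
D3D10_BLEND_DESC blendDesc;
ZeroMemory(&blendDesc, sizeof(blendDesc));
blendDesc.BlendEnable[0] = TRUE;
blendDesc.SrcBlend = D3D10_BLEND_ONE;
blendDesc.DestBlend = D3D10_BLEND_ONE;
blendDesc.BlendOp = D3D10_BLEND_OP_MAX;
blendDesc.SrcBlendAlpha = D3D10_BLEND_ONE;
blendDesc.DestBlendAlpha = D3D10_BLEND_ONE;
blendDesc.BlendOpAlpha = D3D10_BLEND_OP_MAX;
blendDesc.RenderTargetWriteMask[0] = D3D10_COLOR_WRITE_ENABLE_ALL;
ID3D10BlendState* pMaxBlend = NULL;
hr = m_pDeviceDX10->CreateBlendState(&blendDesc, &pMaxBlend);
if (SUCCEEDED(hr)) {
    FLOAT blendFactor[4] = { 1.0f, 1.0f, 1.0f, 1.0f };
    m_pDeviceDX10->OMSetBlendState(pMaxBlend, blendFactor, 0xFFFFFFFF);
}
// Z is disabled via an ID3D10DepthStencilState with DepthEnable = FALSE, bound with OMSetDepthStencilState().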
Note that it is just an educated guess that the error is introduced by this code. If the code above looks correct to you, please say so as well (in the comments). In that case, a new direction in which to look for a solution would be valued.

After a day's work I found my problem: I forgot to recompile my updated shaders, so the old DX9 version of the shader was still being loaded instead of the DX10 version. A very stupid, but also very common, mistake.


Vulkan Transparent 2D Textures

I'm trying to load 2D sprites containing transparent pixels using Vulkan. So far I have been able to load the sprite, but I have not been able to get the transparency working (transparent pixels should blend with the blue background color). I'm not sure what to do to get it right.
Color Blend State:
VkPipelineColorBlendAttachmentState colorBlendAttachment {};
colorBlendAttachment.colorWriteMask =
VK_COLOR_COMPONENT_R_BIT |
VK_COLOR_COMPONENT_G_BIT |
VK_COLOR_COMPONENT_B_BIT |
VK_COLOR_COMPONENT_A_BIT;
colorBlendAttachment.blendEnable = VK_TRUE;
colorBlendAttachment.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
colorBlendAttachment.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
colorBlendAttachment.colorBlendOp = VK_BLEND_OP_ADD;
colorBlendAttachment.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
colorBlendAttachment.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
colorBlendAttachment.alphaBlendOp = VK_BLEND_OP_ADD;
VkPipelineColorBlendStateCreateInfo colorBlendState {};
colorBlendState.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
colorBlendState.logicOpEnable = VK_FALSE;
colorBlendState.logicOp = VK_LOGIC_OP_COPY;
colorBlendState.attachmentCount = 1;
colorBlendState.pAttachments = &colorBlendAttachment;
colorBlendState.blendConstants[0] = 1.f;
colorBlendState.blendConstants[1] = 1.f;
colorBlendState.blendConstants[2] = 1.f;
colorBlendState.blendConstants[3] = 1.f;
Depth Stencil State:
VkPipelineDepthStencilStateCreateInfo info {};
Rasterization State:
VkPipelineRasterizationStateCreateInfo info {};
info.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
info.depthClampEnable = VK_FALSE;
info.rasterizerDiscardEnable = VK_FALSE;
info.polygonMode = polygonMode;
info.lineWidth = 1.0f;
info.cullMode = VK_CULL_MODE_NONE;
info.frontFace = VK_FRONT_FACE_CLOCKWISE;
info.depthBiasEnable = VK_FALSE;
info.depthBiasConstantFactor = 0.0f;
info.depthBiasClamp = 0.0f;
info.depthBiasSlopeFactor = 0.0f;
Vertex Shader:
#version 460
layout (location = 0) in vec2 vPosition;
layout (location = 1) in vec2 vTexCoord;
layout (location = 0) out vec2 texCoord;
void main()
{
gl_Position = vec4(vPosition, 0.0f, 1.0f);
texCoord = vTexCoord;
}
Fragment Shader:
#version 460
layout (location = 0) in vec2 texCoord;
layout (location = 0) out vec4 outFragColor;
layout(set = 0, binding = 0) uniform sampler2D tex;
void main()
{
vec3 color = texture(tex, texCoord).xyz;
outFragColor = vec4(color, 1.0f);
}
When rendering in Vulkan we have to be explicit about how the color blending should be performed. You can read more here.
Some common mistakes when it comes to color blending are choosing the wrong blend factors and blend operations, but it is also very common to forget to enable blending (.blendEnable = VK_TRUE;). In Vulkan you can attach attachments without them taking effect, which is why you have to be explicit:
typedef struct VkPipelineColorBlendAttachmentState
{
VkBool32 blendEnable;
VkBlendFactor srcColorBlendFactor;
VkBlendFactor dstColorBlendFactor;
VkBlendOp colorBlendOp;
VkBlendFactor srcAlphaBlendFactor;
VkBlendFactor dstAlphaBlendFactor;
VkBlendOp alphaBlendOp;
VkColorComponentFlags colorWriteMask;
} VkPipelineColorBlendAttachmentState;
It may also be that your fragment shader is not taking your texture's alpha value into consideration, as in the question above (outFragColor = vec4(color, 1.0f);). Instead, one should retrieve the texel as a full vec4, e.g. outFragColor = texture(tex, texCoord); (i.e. texture(tex, uv).xyzw).
One may also be using the wrong file format, or simply not have proper transparency in the texture files. Recommended texture formats for game engines: DDS, PNG, TGA.
In general, fully transparent pixels of 2D textures should not be handled by blend factors in Vulkan. If you cannot guarantee ordered drawing, it is very easy to end up with scenarios where transparent pixels overwrite non-transparent sections.
The fast solution is to discard transparent pixels in the fragment shader. This prevents the z buffer from being written to and all is well. I use code like this in my billboard shader:
outColor = texture(global_textures[nonuniformEXT(texIndex)], fragTexCoord);
// discard transparent pixels (== do not write z-buffer)
if (outColor.w < 0.8) {
discard;
}

SamplerState issue in the pixel shader

I have a problem with the pixel shader: it compiles but does not render anything, and instead DirectX gives out this error:
D3D11 ERROR: ID3D11DeviceContext::DrawIndexed: The Pixel Shader unit expects a Sampler configured for default filtering to be set at Slot 0, but the sampler bound at this slot is configured for comparison filtering. This mismatch will produce undefined behavior if the sampler is used (e.g. it is not skipped due to shader code branching). [ EXECUTION ERROR #390: DEVICE_DRAW_SAMPLER_MISMATCH].
Here's my shader:
struct PixelInput
{
float4 position: SV_POSITION;
float4 color : COLOR;
float2 UV: TEXCOORD0;
};
//globals
SamplerState ss;
Texture2D shaderTex;
float4 TexturePixelShader(PixelInput input) : SV_TARGET
{
float4 texColors;
texColors = shaderTex.Sample(ss, input.UV);
return texColors;
}
Sampler creation:
samplerDesc.Filter = D3D11_FILTER_COMPARISON_MIN_MAG_MIP_LINEAR;
samplerDesc.AddressU = D3D11_TEXTURE_ADDRESS_WRAP;
samplerDesc.AddressV = D3D11_TEXTURE_ADDRESS_WRAP;
samplerDesc.AddressW = D3D11_TEXTURE_ADDRESS_WRAP;
samplerDesc.MipLODBias = 0.0f;
samplerDesc.MaxAnisotropy = 1;
samplerDesc.ComparisonFunc = D3D11_COMPARISON_ALWAYS;
samplerDesc.BorderColor[0] = 0;
samplerDesc.BorderColor[1] = 0;
samplerDesc.BorderColor[2] = 0;
samplerDesc.BorderColor[3] = 0;
samplerDesc.MinLOD = 0;
samplerDesc.MaxLOD = D3D11_FLOAT32_MAX;
result = device->CreateSamplerState(&samplerDesc, &m_SS);
if (FAILED(result))
return false;
return true;
and rendering function:
void TextureShader::RenderShader(ID3D11DeviceContext* ctxt, int indexCount)
{
ctxt->IASetInputLayout(m_layout);
ctxt->VSSetShader(m_vertexShader, NULL, 0);
ctxt->PSSetShader(m_pixelShader, NULL, 0);
ctxt->PSSetSamplers(0, 1, &m_SS);
ctxt->DrawIndexed(indexCount, 0, 0);
return;
}
You are declaring your sampler as a comparison sampler:
samplerDesc.Filter = D3D11_FILTER_COMPARISON_MIN_MAG_MIP_LINEAR;
It should be:
samplerDesc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
Comparison samplers are used mostly for shadow maps, and are declared as follows in HLSL:
SamplerComparisonState myComparisonSampler;
myTexture.SampleCmp(myComparisonSampler, texCoord);
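On the C++ side, such a comparison sampler would be created with a COMPARISON filter plus a real comparison function, for example (a sketch for a shadow-map case, not code from the question):
D3D11_SAMPLER_DESC cmpDesc = {};
cmpDesc.Filter = D3D11_FILTER_COMPARISON_MIN_MAG_MIP_LINEAR;
cmpDesc.AddressU = D3D11_TEXTURE_ADDRESS_BORDER;
cmpDesc.AddressV = D3D11_TEXTURE_ADDRESS_BORDER;
cmpDesc.AddressW = D3D11_TEXTURE_ADDRESS_BORDER;
cmpDesc.ComparisonFunc = D3D11_COMPARISON_LESS_EQUAL;
cmpDesc.MaxLOD = D3D11_FLOAT32_MAX;
ID3D11SamplerState* pCmpSampler = nullptr;
result = device->CreateSamplerState(&cmpDesc, &pCmpSampler);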
SamplerState in HLSL is an "Effects" construct that only applies to fx_* profiles and to using the Effects framework for the Direct3D 11 runtime.
For shader binding in your case, use:
sampler ss : register(s0);
Texture2D<float4> shaderTex : register(t0);

DirectX 11 Render To Texture

Basically I am trying to render a scene to a texture, as in this OpenGL tutorial here, but in DirectX 11, and I faced some issues:
Absolutely nothing is rendered when I launch the program, and I don't know why.
The only thing the texture displays 'correctly' is the clear color.
I have examined the executable in RenderDoc, and in the captured frame the back buffer draws the quad and the texture on it displays the scene correctly!
Source code peek:
D3D11_TEXTURE2D_DESC texDesc;
ZeroMemory(&texDesc, sizeof(D3D11_TEXTURE2D_DESC));
texDesc.Width = Data.Width;
texDesc.Height = Data.Height;
texDesc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
texDesc.Usage = D3D11_USAGE_DEFAULT;
texDesc.SampleDesc.Count = 1;
texDesc.SampleDesc.Quality = 0;
texDesc.CPUAccessFlags = 0;
texDesc.ArraySize = 1;
texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
texDesc.MiscFlags = 0;
texDesc.MipLevels = 1;
if (Data.Img_Data_Buf == NULL)
{
if (FAILED(DX11Context::GetDevice()->CreateTexture2D(&texDesc, NULL, &result->tex2D)))
{
Log.Error("[DirectX] Texture2D Creation Failed for Null-ed Texture2D!\n");
return;
}
D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc;
srvDesc.Format = texDesc.Format;
srvDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
srvDesc.Texture2D.MostDetailedMip = 0;
srvDesc.Texture2D.MipLevels = 1;
DX11Context::GetDevice()->CreateShaderResourceView(result->tex2D, &srvDesc, &result->resourceView);
return;
}
//depth stencil texture
D3D11_TEXTURE2D_DESC texDesc;
{
texDesc.Width = size.x;
texDesc.Height = size.y;
texDesc.MipLevels = 1;
texDesc.ArraySize = 1;
texDesc.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
texDesc.SampleDesc.Count = 1;
texDesc.SampleDesc.Quality = 0;
texDesc.Usage = D3D11_USAGE_DEFAULT;
texDesc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
texDesc.CPUAccessFlags = 0;
texDesc.MiscFlags = 0;
}
if (FAILED(API::DirectX::DX11Context::GetDevice()->CreateTexture2D(&texDesc, nullptr, &depthstenciltex)))
{
Log.Error("[DX11RenderTarget] Failed to create DepthStencilTexture for render-target!\n");
//Return or the next call will fail too
return;
}
if (FAILED(API::DirectX::DX11Context::GetDevice()->CreateDepthStencilView(depthstenciltex, nullptr, &depthstencilview)))
{
Log.Error("[DX11RenderTarget] Failed to create DepthStencilView for render-target!\n");
}
//render target
D3D11_RENDER_TARGET_VIEW_DESC renderTargetViewDesc;
ZeroMemory(&renderTargetViewDesc, sizeof(D3D11_RENDER_TARGET_VIEW_DESC));
renderTargetViewDesc.Format = texDesc.Format;
renderTargetViewDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
renderTargetViewDesc.Texture2D.MipSlice = 0;
ID3D11RenderTargetView* rtv;
if (FAILED(API::DirectX::DX11Context::GetDevice()->CreateRenderTargetView(texture->tex2D, &renderTargetViewDesc, &rtv)))
{
Log.Error("[DX11RenderTarget] Failed to create render-target-view (RTV)!\n");
return;
}
//binding
Context->OMSetRenderTargets(1, &rtv, rt->depthstenciltex);
Shaders:
std::string VertexShader = R"(struct VertexInputType
{
float4 position : POSITION;
float2 tex : TEXCOORD;
};
struct PixelInputType
{
float4 position : SV_POSITION;
float2 tex : TEXCOORD;
};
cbuffer NE_Camera : register(b0)
{
matrix Model;
matrix View;
matrix Projection;
};
PixelInputType main(VertexInputType input)
{
PixelInputType output;
// Calculate the position of the vertex against the world, view, and projection matrices.
output.position = mul(Model, input.position);
output.position = mul(View, output.position);
output.position = mul(Projection, output.position);
// Store the input texture for the pixel shader to use.
output.tex = input.tex;
return output;
})";
std::string PixelShader = R"(
struct PixelInputType
{
float4 position : SV_POSITION;
float2 tex : TEXCOORD;
};
Texture2D NE_Tex_Diffuse : register(t0);
SamplerState NE_Tex_Diffuse_Sampler : register(s0);
float4 main(PixelInputType input) : SV_TARGET
{
return NE_Tex_Diffuse.Sample(NE_Tex_Diffuse_Sampler, input.tex);
}
)";
std::string ScreenVertexShader = R"(struct VertexInputType
{
float2 position : POSITION;
float2 tex : TEXCOORD;
};
struct PixelInputType
{
float4 position : SV_POSITION;
float2 tex : TEXCOORD;
};
PixelInputType main(VertexInputType input)
{
PixelInputType output;
// Calculate the position of the vertex against the world, view, and projection matrices.
output.position = float4(input.position.x,input.position.y,0.0f,1.0f);
// Store the input texture for the pixel shader to use.
output.tex = input.tex;
return output;
})";
std::string ScreenPixelShader = R"(
struct PixelInputType
{
float4 position : SV_POSITION;
float2 tex : TEXCOORD;
};
Texture2D ScreenTexture : register(t0);
SamplerState ScreenTexture_Sampler : register(s0);
float4 main(PixelInputType input) : SV_TARGET
{
return float4(ScreenTexture.Sample(ScreenTexture_Sampler, input.tex).rgb, 1.0f);
}
)";
Full Source Code
Also, I captured a frame with the Visual Studio graphics debugger, and noticed that for the render-to-texture draw call the PS stage shows "stage didn't run, no output".
Note: I know that the scene should be flipped in DirectX.
I have found the bug causing this problem: I wasn't clearing the depth stencil view when rendering. I wonder why clearing the DSV is essential for render-target output.
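The likely reason: the contents of a freshly created depth buffer are undefined, so with the default LESS depth test most fragments simply fail and never reach the render target. A sketch of the missing clears, using the view names from the snippets above, before drawing into the render target:
const float clearColor[4] = { 0.0f, 0.0f, 0.0f, 1.0f };
Context->ClearRenderTargetView(rtv, clearColor);
Context->ClearDepthStencilView(depthstencilview, D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0f, 0);
Context->OMSetRenderTargets(1, &rtv, depthstencilview); // bind the DSV (the view, not the texture) together with the RTV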

OpenGL: Multisampling texture y axis inverted?

I have a problem with a multisampled texture. It seems that after blitting it to another surface for rendering, it's flipped upside down. What might cause that? Should I provide some code?
edit: Well, it's gonna be a lot of code, but here we go. This is how I create my surfaces / textures:
protected override void Create(int width, int height, SurfaceFormat format)
{
this.format = format;
bool multisample = format.Multisampling > 0;
int samples = Math.Max(0, Math.Min(format.Multisampling, 4));
format.TextureTarget = multisample ? TextureTarget.Texture2DMultisample : format.TextureTarget;
format.MipMapping = format.MipMapping && format.TextureTarget == TextureTarget.Texture2D;
Width = width;
Height = height;
textureHandle = GL.GenTexture();
//bind texture
GL.BindTexture(format.TextureTarget, textureHandle);
Log.Error("Bound Texture: " + GL.GetError());
if (format.TextureTarget == TextureTarget.Texture2D)
{
GL.TexParameter(format.TextureTarget, TextureParameterName.TextureMinFilter, (int)(format.MipMapping ? TextureMinFilter.LinearMipmapLinear : TextureMinFilter.Linear));
GL.TexParameter(format.TextureTarget, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);
GL.TexParameter(format.TextureTarget, TextureParameterName.TextureWrapS, (int)format.WrapMode);
GL.TexParameter(format.TextureTarget, TextureParameterName.TextureWrapT, (int)format.WrapMode);
}
Log.Debug("Created Texture Parameters: " + GL.GetError());
if (samples < 1)
GL.TexImage2D(format.TextureTarget, 0, format.InternalFormat, Width, Height, 0, format.PixelFormat, format.SourceType, format.Pixels);
else
GL.TexImage2DMultisample(TextureTargetMultisample.Texture2DMultisample, samples, format.InternalFormat, Width, Height, true);
if (format.MipMapping)
GL.GenerateMipmap(GenerateMipmapTarget.Texture2D);
Log.Debug("Created Image: " + GL.GetError());
//unbind texture
GL.BindTexture(format.TextureTarget, 0);
//create depthbuffer
if (format.DepthBuffer)
{
GL.GenRenderbuffers(1, out dbHandle);
GL.BindRenderbuffer(RenderbufferTarget.RenderbufferExt, dbHandle);
if(multisample)
GL.RenderbufferStorageMultisample(RenderbufferTarget.RenderbufferExt, samples, RenderbufferStorage.DepthComponent24, Width, Height);
else
GL.RenderbufferStorage(RenderbufferTarget.RenderbufferExt, RenderbufferStorage.DepthComponent24, Width, Height);
}
//create fbo
fboHandle = GL.GenFramebuffer();
GL.BindFramebuffer(FramebufferTarget.FramebufferExt, fboHandle);
GL.FramebufferTexture2D(FramebufferTarget.FramebufferExt, FramebufferAttachment.ColorAttachment0Ext, format.TextureTarget, textureHandle, 0);
if (format.DepthBuffer)
GL.FramebufferRenderbuffer(FramebufferTarget.FramebufferExt, FramebufferAttachment.DepthAttachmentExt, RenderbufferTarget.RenderbufferExt, dbHandle);
Log.Debug("Framebuffer status: " + GL.CheckFramebufferStatus(FramebufferTarget.FramebufferExt));
Log.Debug("Created Framebuffer: " + GL.GetError());
GL.BindFramebuffer(FramebufferTarget.FramebufferExt, 0);
}
creation:
var sf = SurfaceFormat.Surface2D;
sf.Multisampling = 4;
multisampler = new Surface(Window.Width, Window.Height, sf);
Now in the render loop I do the following:
//Render entire scene to multisampler
SceneRenderer.RenderMultisampled(ActiveCamera, multisampler, time);
//blit sampler to my material input texture
multisampler.CloneTo(postEffect.Textures["_tex"]);
//blit this texture to my "Canvas" (basically a surface with additional drawing methods). The canvas material is used as a texture for a quad in my scene, thus rendering a copy of the output image to a plane.
postEffect.Textures["_tex"].CloneTo(canvas.Surface);
//This would be the same but via rendering with a quad instead of blitting. Has the same result
//canvas.Clear();
//canvas.DrawMaterial(postEffect);
//clear framebuffer
GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0);
//Set viewport
GL.Viewport(0, 0, Window.Width, Window.Height);
//use material (bind shader & shader params) and draw the scene.
postEffect.Use();
Helper.DrawScreenQuad();
GL.UseProgram(0);
If this is not enough, I can also post the shaders & mesh code.
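For reference, CloneTo() is not shown in the post; a typical multisample resolve blit, written in raw C/C++ GL rather than OpenTK, looks roughly like this (a sketch of what such a helper usually does; handle names and sizes are hypothetical):
void ResolveMultisample(GLuint msaaFbo, GLuint resolveFbo, int width, int height)
{
    glBindFramebuffer(GL_READ_FRAMEBUFFER, msaaFbo);     // multisampled source
    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, resolveFbo);  // single-sample destination
    glBlitFramebuffer(0, 0, width, height,               // source rectangle
                      0, 0, width, height,               // destination rectangle, same orientation
                      GL_COLOR_BUFFER_BIT, GL_NEAREST);
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
}
With identical, same-orientation rectangles the blit itself does not flip the image, which fits the later finding that the flip came from inverted texture coordinates combined with an inverted camera.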
EDIT2: Okay everything is now working as expected EXCEPT when I use canvas.draw() instead of blitting the texture. The draw method looks like this:
public void DrawMaterial(Material material)
{
GL.Viewport(0, 0, Surface.Width, Surface.Height);
Surface.BindFramebuffer();
material.Use();
Helper.DrawScreenQuad();
GL.UseProgram(0);
GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0);
}
Draw screen quad:
public static void DrawScreenQuad()
{
GL.Begin(PrimitiveType.Quads);
GL.TexCoord2(0, 1);
GL.Vertex2(-1, -1);
GL.TexCoord2(1, 1);
GL.Vertex2(1, -1);
GL.TexCoord2(1, 0);
GL.Vertex2(1, 1);
GL.TexCoord2(0, 0);
GL.Vertex2(-1, 1);
GL.End();
}
Shader used:
[Shader vertex]
#version 150 core
in vec2 _pos;
out vec2 texCoord;
uniform float _time;
uniform sampler2D tex;
void main() {
gl_Position = vec4(_pos, 0, 1);
texCoord = _pos/2+vec2(0.5,0.5);
texCoord.y = 1 - texCoord.y;
}
[Shader fragment]
#version 150 core
#define PI 3.1415926535897932384626433832795
out vec4 outColor;
uniform float _time;
uniform sampler2D tex;
in vec2 texCoord;
//
void main() {
outColor = texture2D(tex, texCoord);
}
Somehow the rendered scene gets turned upside down by this. Why?
I think I found my mistake: I had the texture coordinates AND the camera inverted. It seems to be fixed now. What I still don't understand is why this works:
[Shader vertex]
#version 150 core
in vec2 _pos;
out vec2 texCoord;
uniform float _time;
uniform sampler2D tex;
void main() {
gl_Position = vec4(_pos, 0, 1);
texCoord = _pos/2+vec2(0.5,0.5);
//texCoord.y = 1 - texCoord.y;
}
[Shader fragment]
#version 150 core
#define PI 3.1415926535897932384626433832795
out vec4 outColor;
uniform float _time;
uniform sampler2D tex;
in vec2 texCoord;
//
void main() {
outColor = texture2D(tex, texCoord);
}
I would've expected that the y coordinate of the tex coord would need to be inverted.

Background face is visible over foreground face in same mesh while using a diffuse shader in DirectX

I am trying to create a simple diffuse shader to paint primitive objects in DirectX 9 and faced the following problem. When I use a DirectX primitive object like a Torus or Teapot, some faces in the foreground part of the mesh are invisible. I don't think this is the usual case of back faces being invisible, as I cannot reproduce this behavior for primitive objects like the Sphere or Box, where no two quads have the same normal. Following are some screenshots in fill and wire-frame modes.
torus fill-mode
Following is my vertex declaration code.
// vertex position...
D3DVERTEXELEMENT9 element;
element.Stream = 0;
element.Offset = 0;
element.Type = D3DDECLTYPE_FLOAT3;
element.Method = D3DDECLMETHOD_DEFAULT;
element.Usage = D3DDECLUSAGE_POSITION;
element.UsageIndex = 0;
m_vertexElement.push_back(element);
// vertex normal
element.Stream = 0;
element.Offset = 12; //3 floats * 4 bytes per float
element.Type = D3DDECLTYPE_FLOAT3;
element.Method = D3DDECLMETHOD_DEFAULT;
element.Usage = D3DDECLUSAGE_NORMAL;
element.UsageIndex = 0;
m_vertexElement.push_back(element);
And the shader code under development:
float4x4 MatWorld : register(c0);
float4x4 MatViewProj : register(c4);
float4 matColor : register(c0);
struct VS_INPUT
{
float4 Position : POSITION;
float3 Normal : NORMAL;
};
struct VS_OUTPUT
{
float4 Position : POSITION;
float3 Normal : TEXCOORD0;
};
struct PS_OUTPUT
{
float4 Color : COLOR0;
};
VS_OUTPUT vsmain(in VS_INPUT In)
{
VS_OUTPUT Out;
float4 wpos = mul(In.Position, MatWorld);
Out.Position = mul(wpos, MatViewProj);
Out.Normal = normalize(mul(In.Normal, MatWorld));
return Out;
};
PS_OUTPUT psmain(in VS_OUTPUT In)
{
PS_OUTPUT Out;
float4 ambient = {0.1, 0.0, 0.0, 1.0};
float3 light = {1, 0, 0};
Out.Color = ambient + matColor * saturate(dot(light, In.Normal));
return Out;
};
I have also tried setting different render states for Depth-Stencil but wasn't successful.
project files
I figured it out! This is a depth buffer (Z-buffer) issue; you can enable the Z-buffer in your code, either through the fixed pipeline or in the shader.
To enable the Z-buffer in the fixed pipeline:
First, add the following code when creating the D3D device:
d3dpp.EnableAutoDepthStencil = TRUE ;
d3dpp.AutoDepthStencilFormat = D3DFMT_D16 ;
Then enable the Z-buffer before drawing:
device->SetRenderState(D3DRS_ZENABLE, TRUE) ;
Finally, clear the Z-buffer in the render function:
device->Clear( 0, NULL, D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER, D3DCOLOR_XRGB(0,0,0), 1.0f, 0 );
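Putting the three pieces together, a typical D3D9 frame with the Z-buffer enabled looks roughly like this (a sketch; names are illustrative, not from the original project):
device->SetRenderState(D3DRS_ZENABLE, TRUE);
device->Clear(0, NULL, D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER, D3DCOLOR_XRGB(0, 0, 0), 1.0f, 0);
if (SUCCEEDED(device->BeginScene()))
{
    // ... set shaders/streams and draw the torus or teapot here ...
    device->EndScene();
}
device->Present(NULL, NULL, NULL, NULL);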