DirectX depth buffering not working - c++

For some strange reason my depth buffer is not working, i.e. triangles drawn later always end up on top, regardless of their position.
I have these present parameters:
D3DPRESENT_PARAMETERS d3dpp;
ZeroMemory(&d3dpp, sizeof(d3dpp));
d3dpp.Windowed = TRUE;
d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
d3dpp.hDeviceWindow = mWindow;
d3dpp.BackBufferFormat = D3DFMT_X8R8G8B8;
d3dpp.BackBufferWidth = mScreenWidth;
d3dpp.BackBufferHeight = mScreenHeight;
d3dpp.EnableAutoDepthStencil = TRUE;
d3dpp.AutoDepthStencilFormat = D3DFMT_D16;
and these render states:
d3dDevice->SetRenderState(D3DRS_LIGHTING, TRUE); // turn on the 3D lighting
d3dDevice->SetRenderState(D3DRS_ZENABLE, TRUE); // turn on the z-buffer
d3dDevice->SetRenderState(D3DRS_NORMALIZENORMALS, TRUE);
d3dDevice->SetRenderState(D3DRS_AMBIENT, D3DCOLOR_XRGB(50, 50, 50)); // ambient light
edit:
Thanks for replying. This is the rendering code:
d3dDevice->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 0), 1.0f, 0);
d3dDevice->Clear(0, NULL, D3DCLEAR_ZBUFFER, D3DCOLOR_XRGB(0, 0, 0), 1.0f, 0);
d3dDevice->BeginScene();
// View transform
D3DXMATRIX matView;
D3DXMatrixLookAtLH(&matView,
&PlayerPos, // the camera position
&(LookAtRelative + PlayerPos), // the look-at position
&D3DXVECTOR3 (0.0f, 1.0f, 0.0f)); // the up direction
d3dDevice->SetTransform(D3DTS_VIEW, &matView);
// Projection transform
D3DXMATRIX matProjection;
D3DXMatrixPerspectiveFovLH(&matProjection,
D3DXToRadian(45), // the horizontal field of view
(FLOAT)mScreenWidth / (FLOAT)mScreenHeight, // aspect ratio
0.0f, // the near view-plane
1000.0f); // the far view-plane
d3dDevice->SetTransform(D3DTS_PROJECTION, &matProjection);
for (unsigned int i=0; i < mModels.size(); i++) {
mModels[i]->Draw();
}
d3dDevice->EndScene();
d3dDevice->Present(NULL, NULL, NULL, NULL);
and the Model::Draw() code is this:
void Model::Draw() {
// Setup the world transform matrix
D3DXMATRIX matScale;
D3DXMATRIX matRotate;
D3DXMATRIX matTranslate;
D3DXMATRIX matWorldTransform;
D3DXMatrixScaling(&matScale, mScale->x, mScale->y, mScale->z);
D3DXMatrixRotationY(&matRotate, 0);
D3DXMatrixTranslation(&matTranslate, mPosition->x, mPosition->y, mPosition->z);
matWorldTransform = matScale * matRotate * matTranslate;
d3dDevice->SetTransform(D3DTS_WORLD, &matWorldTransform);
d3dDevice->SetFVF(CUSTOMFVF);
d3dDevice->SetStreamSource(0, vertexBuffer, 0, sizeof(CUSTOMVERTEX));
d3dDevice->SetIndices(indexBuffer);
d3dDevice->DrawIndexedPrimitive(D3DPT_TRIANGLELIST, 0, 0, vertexCount, 0, indexCount/3);
}
where vertexBuffer and indexBuffer, along with their counts, are attributes of the class.
Here are some screenshots (links only, because of the spam protection):
1) http://img822.imageshack.us/img822/1705/dx2010080913182262.jpg this is the situation
2) http://img691.imageshack.us/img691/7358/dx2010080913183790.jpg this is the (correct) view when the cube is in front (the cube is drawn later)
3) http://img340.imageshack.us/img340/4720/dx2010080913184509.jpg But when I have the truncated pyramid in front, the cube still overlaps
It's easier to see when you move the camera yourself...

Now that's a gotcha. The problem was that I set the near view-plane to 0.0f; when I changed it to something like 0.001f, the z-buffer suddenly started to work.
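For reference, a near plane of 0 collapses the depth range: with zn = 0 the perspective matrix maps every z to a depth of 1.0, so the depth test can never reject anything. A minimal corrected call (same FOV and aspect as above; the exact near value is up to you, but larger values give better depth precision):
// The near view-plane must be greater than 0, otherwise every fragment lands on depth 1.0.
D3DXMatrixPerspectiveFovLH(&matProjection,
    D3DXToRadian(45),                           // field of view (y direction)
    (FLOAT)mScreenWidth / (FLOAT)mScreenHeight, // aspect ratio
    0.1f,                                       // the near view-plane (> 0)
    1000.0f);                                   // the far view-plane
d3dDevice->SetTransform(D3DTS_PROJECTION, &matProjection);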

Related

How to rotate camera view with glm?

I'm trying to rotate my camera so that I see an object rotating around my cam, using a rotation matrix that I wrote myself, but the problem is that it doesn't work.
So I tried the glm::rotate matrix and put in the values
m_View = glm::rotate(m_View, a * glm::radians(180.0f), glm::vec3(0.0f, 1.0f, 0.0f))
but it does not work either:
void CCam::setView()
{
Front = glm::normalize(Eye - At);
Right = glm::normalize(glm::cross(Up, Front));
up = glm::cross(Front, Right); // true up vector
m_View = glm::lookAt(
Eye, // Camera Position
(Eye + Front), // Where the camera looks
up // the computed up direction
);
newAt = glm::vec4(At, 1.0f);
//m_View = m_View * GLMatrixRotationY(a);
m_View = glm::rotate(m_View, a * glm::radians(180.0f), glm::vec3(0.0f, 1.0f, 0.0f));
}
glm::mat4 CCam::GLMatrixRotationX(float Angle)
{
matrizRotacionX = glm::mat4(
1, 0, 0, 0,
0, cos(Angle), -sin(Angle), 0,
0, sin(Angle), cos(Angle), 0,
0, 0, 0, 1
);
return matrizRotacionX;
}
I expect to see my mesh rotating around the camera, but I only get the camera rotating around the mesh.
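A likely cause is the multiplication order: glm::rotate(m_View, ...) post-multiplies the rotation, so it is applied in world space about the world origin before the lookAt transform, which reads as the camera orbiting the mesh. To spin the scene around the camera instead, the rotation has to be applied in view space, i.e. pre-multiplied. A minimal sketch, assuming the same CCam members as in the code above:
// Build the view matrix as before ...
m_View = glm::lookAt(Eye, Eye + Front, up);
// ... then pre-multiply the rotation so it acts about an axis through the camera,
// not through the world origin.
glm::mat4 rotY = glm::rotate(glm::mat4(1.0f),
                             a * glm::radians(180.0f),
                             glm::vec3(0.0f, 1.0f, 0.0f));
m_View = rotY * m_View;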

How would you produce a scrolling effect for an object?

I am using OpenGL in Visual C++. I am displaying a rectangle using a VBO and VAO, and I have a basic vertex shader and fragment shader to apply a color and position to the object.
I would like to produce a scrolling effect for the object until I press the Esc key. What would be the best way to do that?
I know that you can provide your Model, View and Projection matrices to your shader to transform the object, especially the model matrix for the transformations. I can change the position of the object and rotate it, but I am not sure how to produce a continuous scrolling or rotating motion until a key is pressed.
My code so far:
GLfloat vb_data[] = {
-0.25f, 0.25f, 0.0f, // Top-left
0.25f, 0.25f, 0.0f, // Top-right
0.25f, -0.25f, 0.0f, // Bottom-right
-0.25f, -0.25f, 0.0f // Bottom-left
};
glGenBuffers(1, &vbo_ID);
glBindBuffer(GL_ARRAY_BUFFER, vbo_ID);
glBufferData(GL_ARRAY_BUFFER, sizeof(vb_data), vb_data, GL_DYNAMIC_DRAW);
GLfloat incre = 0.01f;
glClear(GL_COLOR_BUFFER_BIT);
while (1){
glm::mat4 trans = glm::mat4(1.0f);
glm::mat4 transBar = glm::translate(trans, glm::vec3(0.2f, 0.2f, 1.0f));
glm::mat4 MVP = transBar;
glUseProgram(pgID);
glUniformMatrix4fv(MatrixID, 1, GL_FALSE, &MVP[0][0]);
glBindBuffer(GL_ARRAY_BUFFER, vbo_ID);
glEnableVertexAttribArray(0);
glVertexAttribPointer(
0,
3,
GL_FLOAT,
GL_FALSE,
0,
(void*)0
);
glDrawArrays(GL_QUADS,0, 4);
glDisableVertexAttribArray(0);
SwapBuffers(g_pOpenGLWindow->hDC);
}
but I am not sure how to produce a continuous scrolling or rotating motion ...
To generate a smooth motion (translation or rotation), the translation offset or rotation angle in the model matrix has to be changed by a small amount in every frame. This can be achieved with control variables that are incremented by a small step each frame. The model matrix then has to be recalculated from these control variables in every frame.
See the concept in the following pseudo code:
translate_x = 0;
angle = 0;
while (1)
    model_matrix = translate(translate_x, 0, 0) * rotateX(angle)
    translate_x = translate_x + step_x
    angle = angle + step_angle
Try the following code and play around with the parameters to achieve the effect you need:
float angle = 0.0f;
float step_ang = 1.0f;
float trans_x = -0.5f;
float step_x = 0.01f;
bool move_forward = true;
while (1)
{
glm::mat4 model = glm::mat4(1.0f);
model = glm::translate(model, glm::vec3(trans_x, 0.2f, 1.0f));
model = glm::rotate(model, glm::radians(angle), glm::vec3(0.0f, 0.0f, 1.0f));
angle += step_ang;
trans_x += move_forward ? step_x : -step_x;
move_forward = move_forward ? trans_x < 0.5f : trans_x <= -0.5f;
glm::mat4 MVP = model;
.....
... till a key is pressed.
Since the question is tagged winapi, I assume that you have initialized a WNDCLASSEX structure and assigned your WindowProc callback function to its lpfnWndProc member.
You have to handle the WM_KEYDOWN message in the window callback function and check whether the Esc key (VK_ESCAPE) was pressed:
Global variable:
bool esc_pressed = false;
Callback function:
LRESULT CALLBACK WindowProcedure( HWND hWnd, unsigned int msg, WPARAM wparam, LPARAM lparam )
{
switch(msg)
{
case WM_KEYDOWN:
if ( wparam == VK_ESCAPE )
esc_pressed = true;
break;
// other messages
// ...
}
return DefWindowProc( hWnd, msg, wparam, lparam );
}
Further, you have to handle the window messages in the main loop. See the GetMessage and DispatchMessage functions.
Instead of
while (1)
{
.....
}
you have to use something like:
MSG msg;
while( GetMessage( &msg, 0, 0, 0 ) )
{
DispatchMessage( &msg );
.....
}
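Note that GetMessage blocks until a message arrives, so with this loop the scene only advances when there is input. If you want the animation to keep running between messages, a common alternative (a sketch, reusing the esc_pressed flag from above) is to drain the queue with PeekMessage and render once per pass:
MSG msg = {};
while (!esc_pressed)
{
    // Handle all pending window messages without blocking ...
    while (PeekMessage(&msg, 0, 0, 0, PM_REMOVE))
    {
        if (msg.message == WM_QUIT)
            esc_pressed = true;
        TranslateMessage(&msg);
        DispatchMessage(&msg);
    }
    // ... then update the matrices and render one frame here.
}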

Setting Constant Buffer Directx

So, I'm trying to get the constant buffer in my shader to hold an orthographic projection matrix...
Can anyone tell me why I'm rendering nothing now that I tried to do this?
I assumed I needed to make two mapped subresources and two buffers, one for the vertices and the other for the constants; maybe this is wrong?
Here is my code:
Vertex OurVertices[] =
{
{ D3DXVECTOR2(-0.5f,-0.5f), D3DXCOLOR(0.0f, 0.0f, 0.0f, 1.0f) },
{ D3DXVECTOR2(-0.5f,0.5f), D3DXCOLOR(0.0f, 1.0f, 0.0f, 1.0f) },
{ D3DXVECTOR2(0.5f,0.5f), D3DXCOLOR(0.0f, 0.0f, 1.0f, 1.0f) },
{ D3DXVECTOR2(-0.5f,-0.5f), D3DXCOLOR(1.0f, 0.0f, 0.0f, 1.0f) },
{ D3DXVECTOR2(0.5f,0.5f), D3DXCOLOR(0.0f, 1.0f, 0.0f, 1.0f) },
{ D3DXVECTOR2(0.5f,-0.5f), D3DXCOLOR(0.0f, 0.0f, 1.0f, 1.0f) }
};
// create the vertex buffer
D3D11_BUFFER_DESC vertexBufferDesc;
ZeroMemory(&vertexBufferDesc, sizeof(vertexBufferDesc));
vertexBufferDesc.Usage = D3D11_USAGE_DYNAMIC; // write access by the CPU, read access by the GPU
vertexBufferDesc.ByteWidth = sizeof(Vertex) * 6; // size is the Vertex struct * 6
vertexBufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER; // use as a vertex buffer
vertexBufferDesc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; // allow CPU to write in buffer
dev->CreateBuffer(&vertexBufferDesc, NULL, &pVBuffer); // create the buffer
D3D11_MAPPED_SUBRESOURCE ms_Vertex;
devcon->Map(pVBuffer, 0, D3D11_MAP_WRITE_DISCARD, 0, &ms_Vertex);
memcpy(ms_Vertex.pData, OurVertices, sizeof(OurVertices)); // copy the data
devcon->Unmap(pVBuffer, NULL); // unmap the buffer
devcon->VSSetConstantBuffers(NULL, 1, &pVBuffer); // Finally set the constant buffer in the vertex shader with the updated values.
MatrixBufferType* dataPtr;
D3D11_BUFFER_DESC constantBufferDesc; // create the constant buffer
ZeroMemory(&constantBufferDesc, sizeof(constantBufferDesc));
constantBufferDesc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
constantBufferDesc.Usage = D3D11_USAGE_DYNAMIC;
constantBufferDesc.ByteWidth = sizeof(MatrixBufferType);
constantBufferDesc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
constantBufferDesc.MiscFlags = 0;
constantBufferDesc.StructureByteStride = 0;
dev->CreateBuffer(&constantBufferDesc, NULL, &pCBuffer); // create the buffer
D3D11_MAPPED_SUBRESOURCE ms_CBuffer;
devcon->Map(pCBuffer, 0, D3D11_MAP_WRITE_DISCARD, 0, &ms_CBuffer);
D3DXMatrixOrthoLH(&m_orthoMatrix, SCREEN_WIDTH, SCREEN_HEIGHT, 0, 1);
D3DXMatrixTranspose(&m_orthoMatrix, &m_orthoMatrix);
dataPtr = (MatrixBufferType*)ms_CBuffer.pData;
dataPtr->projection = m_orthoMatrix;
memcpy(ms_CBuffer.pData, &dataPtr, sizeof(MatrixBufferType));
devcon->Unmap(pCBuffer, NULL);
devcon->VSSetConstantBuffers(NULL, 1, &pCBuffer); // Finally set the constant buffer in the vertex shader with the updated values.
So, attempting to put my projection matrix to use in my shader code:
cbuffer ConstantBuffer
{
matrix world;
matrix view;
matrix projection;
};
VOut VShader(float4 position : POSITION, float4 color : COLOR)
{
VOut output;
output.position = position;
output.position = mul(output.position, projection);
output.color = color;
return output;
}
This causes my quad, which was originally rendering, to disappear.
This leads me to believe I'm doing something wrong; I just don't know what yet.
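For comparison, the usual pattern for filling a dynamic constant buffer writes the matrix data directly into the mapped memory; note that memcpy(ms_CBuffer.pData, &dataPtr, ...) above copies the address stored in dataPtr rather than the matrix itself, and that vertex buffers are normally bound with IASetVertexBuffers rather than VSSetConstantBuffers. A minimal sketch, assuming the MatrixBufferType, pCBuffer and m_orthoMatrix declared above:
D3D11_MAPPED_SUBRESOURCE ms_CBuffer;
devcon->Map(pCBuffer, 0, D3D11_MAP_WRITE_DISCARD, 0, &ms_CBuffer);
MatrixBufferType* dataPtr = (MatrixBufferType*)ms_CBuffer.pData;
dataPtr->projection = m_orthoMatrix;           // write straight into the mapped memory
devcon->Unmap(pCBuffer, 0);
devcon->VSSetConstantBuffers(0, 1, &pCBuffer); // slot 0 feeds the cbuffer in the vertex shader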

D3D11 DrawIndexed() is drawing to the wrong render target

I'm attempting to render a scene to two textures (left and right) for use with the Oculus Rift. When I set the render target to a 2D texture render view and call DrawIndexed() it renders to the back buffer instead of the texture. I'm using Visual Studio, and I've run the Graphics Diagnostics on it. On the DrawIndexed() event, it shows the render target is the texture, but the pixel history doesn't show the event. If I don't clear the backbuffer, the scene shows up on the screen.
In the following code, the RenderLeft() function should render an image to a plane on a green background, with the render target set to the left render texture. Then RenderRight() should take the texture rendered by RenderLeft(), render it to the plane, and output that to the back buffer. (Note: this isn't the normal setup; it's just to help see whether the texture is being rendered to or not.)
In the final output, there should be nothing on the left side of the screen, and on the right should be the source image inside a green rectangle on a black background.
Instead, I get this: http://i.imgur.com/dHX5Ed3.png?1
RenderLeft is rendering to the back buffer, even though the render target is a texture, so then the texture used by RenderRight is just the color used to clear it.
Here is the code I'm currently using. I think I've included everything that's relevant.
// this is the function used to render a single frame
void Direct3D::RenderFrame()
{
CreateTransforms(); //this creates matFinalLeft and matFinalRight, which is (world matrix)*(view matrix)*(projection matrix) with the proper offsets for a stereoscopic view.
setVertices(); //this sets the vertex and index buffers.
setMainShaders(); // this sets the shaders used to render the 3D scene
RenderLeft(pTextureLeftRenderView, matFinalLeft, viewportLeft, true); //this renders an image to a plane on a green background. It SHOULD render to a texture.
RenderRight(backbuffer, matFinalRight, viewportRight, false);//this renders the render target from RenderLeft to the plane and renders to the back buffer.
swapchain->Present(0, 0); //output back buffer to screen.
}
This section should render a rectangle textured with an image to the left side of the render texture.
//Render the scene to the left side of a texture
void Direct3D::RenderLeft(ID3D11RenderTargetView *RenderTarget, D3DXMATRIX matFinal, D3D11_VIEWPORT viewport, bool clearRenderTarget){
devcon->OMSetRenderTargets(1, &RenderTarget, zbuffer);
devcon->RSSetViewports(1, &viewport);
// update shader resources
devcon->UpdateSubresource(pCBufferPrimaryShader, 0, 0, &matFinal, 0, 0);
devcon->PSSetShaderResources(0, 1, &pTextureLeftResourceView);
// clear the depth buffer and render target texture
devcon->ClearDepthStencilView(zbuffer, D3D11_CLEAR_DEPTH, 1.0f, 0);
if (clearRenderTarget){
devcon->ClearRenderTargetView(RenderTarget, D3DXCOLOR(0.0f, 1.0f, 0.0f, 1.0f));
}
// render to texture on left side (oculus) or full texture
devcon->DrawIndexed(6, 0, 0);
}
This section should render a rectangle with the texture from RenderLeft() to the back buffer.
//Render the scene to the right side of the back buffer
void Direct3D::RenderRight(ID3D11RenderTargetView *RenderTarget, D3DXMATRIX matFinal, D3D11_VIEWPORT viewport, bool clearRenderTarget){
//render to texture
devcon->OMSetRenderTargets(1, &RenderTarget, zbuffer);
devcon->RSSetViewports(1, &viewport);
// update shader resources
devcon->UpdateSubresource(pCBufferPrimaryShader, 0, 0, &matFinal, 0, 0);
devcon->PSSetShaderResources(0, 1, &pRenderTextureLeftResourceView);
// clear the depth buffer and render target texture
devcon->ClearDepthStencilView(zbuffer, D3D11_CLEAR_DEPTH, 1.0f, 0);
if (clearRenderTarget){
devcon->ClearRenderTargetView(RenderTarget, D3DXCOLOR(0.0f, 0.0f, 1.0f, 1.0f));
}
// render to texture on left side (oculus) or full texture
devcon->DrawIndexed(6, 0, 0);
}
Finally, the code that creates the various views and viewports
void Direct3D::InitD3D(HWND hWnd)
{
// create a struct to hold information about the swap chain
DXGI_SWAP_CHAIN_DESC scd;
// clear out the struct for use
ZeroMemory(&scd, sizeof(DXGI_SWAP_CHAIN_DESC));
// fill the swap chain description struct
scd.BufferCount = 1; // one back buffer
scd.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; // use 32-bit color
scd.BufferDesc.Width = screen_width;
scd.BufferDesc.Height = screen_height;
scd.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT; // how swap chain is to be used
scd.OutputWindow = hWnd; // the window to be used
scd.SampleDesc.Count = 4; // how many multisamples
scd.Windowed = TRUE; // windowed/full-screen mode
scd.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH;
// create a device, device context and swap chain using the information in the scd struct
D3D11CreateDeviceAndSwapChain(NULL,
D3D_DRIVER_TYPE_HARDWARE,
NULL,
NULL,
NULL,
NULL,
D3D11_SDK_VERSION,
&scd,
&swapchain,
&dev,
NULL,
&devcon);
// create the depth buffer texture
D3D11_TEXTURE2D_DESC texd;
ZeroMemory(&texd, sizeof(texd));
texd.Width = screen_width;
texd.Height = screen_height;
texd.ArraySize = 1;
texd.MipLevels = 1;
texd.SampleDesc.Count = 4;
texd.Format = DXGI_FORMAT_D32_FLOAT;
texd.BindFlags = D3D11_BIND_DEPTH_STENCIL;
ID3D11Texture2D *pDepthBuffer;
dev->CreateTexture2D(&texd, NULL, &pDepthBuffer);
// create the depth buffer
D3D11_DEPTH_STENCIL_VIEW_DESC dsvd;
ZeroMemory(&dsvd, sizeof(dsvd));
dsvd.Format = DXGI_FORMAT_D32_FLOAT;
dsvd.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2DMS;
dev->CreateDepthStencilView(pDepthBuffer, &dsvd, &zbuffer);
pDepthBuffer->Release();
// get the address of the back buffer
ID3D11Texture2D *pBackBuffer;
swapchain->GetBuffer(0, __uuidof(ID3D11Texture2D), (LPVOID*)&pBackBuffer);
// use the back buffer address to create the render target
dev->CreateRenderTargetView(pBackBuffer, NULL, &backbuffer);
pBackBuffer->Release();
//create intermediate render textures
ID3D11Texture2D *pRenderTextureLeft;
D3D11_TEXTURE2D_DESC textureDesc;
D3D11_RENDER_TARGET_VIEW_DESC renderTargetViewDesc;
D3D11_SHADER_RESOURCE_VIEW_DESC shaderResourceViewDesc;
ZeroMemory(&textureDesc, sizeof(textureDesc));
textureDesc.Width = screen_width;
textureDesc.Height = screen_height;
if (oculus){
textureDesc.Width = (UINT)((FLOAT)textureDesc.Width * oculus->renderScale);
textureDesc.Height = (UINT)((FLOAT)textureDesc.Height *oculus->renderScale);
}
textureDesc.MipLevels = 1;
textureDesc.ArraySize = 1;
textureDesc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
textureDesc.SampleDesc.Count = 1;
textureDesc.Usage = D3D11_USAGE_DEFAULT;
textureDesc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
textureDesc.CPUAccessFlags = 0;
textureDesc.MiscFlags = 0;
dev->CreateTexture2D(&textureDesc, NULL, &pRenderTextureLeft);
renderTargetViewDesc.Format = textureDesc.Format;
renderTargetViewDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
renderTargetViewDesc.Texture2D.MipSlice = 0;
dev->CreateRenderTargetView(pRenderTextureLeft, &renderTargetViewDesc, &pTextureLeftRenderView);
shaderResourceViewDesc.Format = textureDesc.Format;
shaderResourceViewDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
shaderResourceViewDesc.Texture2D.MostDetailedMip = 0;
shaderResourceViewDesc.Texture2D.MipLevels = 1;
dev->CreateShaderResourceView(pRenderTextureLeft, &shaderResourceViewDesc, &pRenderTextureLeftResourceView);
ID3D11Texture2D *pRenderTextureRight;
dev->CreateTexture2D(&textureDesc, NULL, &pRenderTextureRight);
dev->CreateRenderTargetView(pRenderTextureRight, &renderTargetViewDesc, &pTextureRightRenderView);
dev->CreateShaderResourceView(pRenderTextureRight, &shaderResourceViewDesc, &pRenderTextureRightResourceView);
/*if (oculus){
pOculusOutputDevice = oculus->searchForOculusDisplay(oculus->hmd.DisplayDeviceName);
swapchain->SetFullscreenState(TRUE, pOculusOutputDevice);
}*/
// Set the viewport
ZeroMemory(&viewportLeft, sizeof(D3D11_VIEWPORT));
ZeroMemory(&viewportRight, sizeof(D3D11_VIEWPORT));
ZeroMemory(&viewportCenter, sizeof(D3D11_VIEWPORT));
viewportCenter.TopLeftX = 0.0f;
viewportCenter.TopLeftY = 0.0f;
if (oculus){
viewportCenter.Width = (FLOAT)screen_width*oculus->renderScale;
viewportCenter.Height = (FLOAT)screen_height*oculus->renderScale;
}
else{
viewportCenter.Width = (FLOAT)screen_width;
viewportCenter.Height = (FLOAT)screen_height;
}
viewportCenter.MinDepth = 0.0f;
viewportCenter.MaxDepth = 1.0f;
if (dual_mode){
viewportLeft.TopLeftX = 0.0f;
viewportLeft.TopLeftY = 0.0f;
viewportLeft.Width = (FLOAT)screen_width / 2.0f;
viewportLeft.Height = (FLOAT)screen_height;
viewportLeft.MinDepth = 0.0f;
viewportLeft.MaxDepth = 1.0f;
viewportRight.TopLeftX = (FLOAT)screen_width / 2.0f;
viewportRight.TopLeftY = 0.0f;
viewportRight.Width = (FLOAT)screen_width / 2.0f;
viewportRight.Height = (FLOAT)screen_height;
viewportRight.MinDepth = 0.0f;
viewportRight.MaxDepth = 1.0f;
}
devcon->RSSetViewports(1, &viewportCenter);
InitPipeline();
InitGraphics();
}
Per request, here is some more code:
I'm including the entire Direct3D class header, so you can see what are and are not member variables.
#pragma once
#include "Oculus.h"
#include <OVR.h>
#include "Camera.h"
#include <d3d11.h>
#include <D3DX11.h>
#include <D3DX10.h>
#pragma comment (lib, "d3d11.lib")
#pragma comment (lib, "d3dx11.lib")
#pragma comment (lib, "d3dx10.lib")
class Direct3D
{
public:
struct VERTEX{ FLOAT X, Y, Z; D3DXCOLOR Color; FLOAT U, V; };
struct DISTORTION{
FLOAT LensCenter[2];
FLOAT ScreenCenter[2];
FLOAT Scale[2];
FLOAT ScaleIn[2];
FLOAT HmdWarpParam[4];
};
IDXGISwapChain *swapchain; // the pointer to the swap chain interface
ID3D11Device *dev; // the pointer to our Direct3D device interface
ID3D11DeviceContext *devcon; // the pointer to our Direct3D device context
ID3D11RenderTargetView *backbuffer;
IDXGIOutput* pOculusOutputDevice;
ID3D11VertexShader *pVS_Primary; // the vertex shader
ID3D11PixelShader *pPS_Primary; // the pixel shader
ID3D11VertexShader *pVS_Distortion;
ID3D11PixelShader *pPS_Distortion; // the pixel shader
ID3D11Buffer *pVBuffer; //vertec buffer
ID3D11Buffer *pIBuffer;
ID3D11InputLayout *pLayout_Primary;
ID3D11InputLayout *pLayout_Distortion;
D3D11_VIEWPORT viewportLeft;
D3D11_VIEWPORT viewportRight;
D3D11_VIEWPORT viewportCenter;
ID3D11Buffer *pCBufferPrimaryShader;
ID3D11Buffer *pCBufferDistortionShader;
ID3D11DepthStencilView *zbuffer; // the pointer to our depth buffer
ID3D11ShaderResourceView *pTextureLeftResourceView; // the pointer to the texture
ID3D11ShaderResourceView *pTextureRightResourceView;
ID3D11ShaderResourceView *pRenderTextureLeftResourceView;
ID3D11ShaderResourceView *pRenderTextureRightResourceView;
ID3D11RenderTargetView *pTextureLeftRenderView;
ID3D11RenderTargetView *pTextureRightRenderView;
D3DXMATRIX matFinalLeft;
D3DXMATRIX matFinalRight;
Camera cameraLeft, cameraRight;
int screen_width;
int screen_height;
bool dual_mode;
Oculus* oculus;
Direct3D(Oculus* oculus);
Direct3D();
~Direct3D();
void InitD3D(HWND hWnd); // sets up and initializes Direct3D
void CleanD3D(void); // closes Direct3D and releases memory
void RenderFrame();
void InitPipeline();
void InitGraphics();
void RenderLeft(ID3D11RenderTargetView *RenderTarget, D3DXMATRIX matFinal, D3D11_VIEWPORT viewport, bool clearRenderTarget);
void RenderRight(ID3D11RenderTargetView *RenderTarget, D3DXMATRIX matFinal, D3D11_VIEWPORT viewport, bool clearRenderTarget);
void DistortionCorrection(ID3D11RenderTargetView *RenderTarget);
void CreateTransforms();
void setVertices();
void setMainShaders();
void OVRMatrix4fToD3DXMatrix(OVR::Matrix4f& source, D3DXMATRIX& dest);
};
And here is the code that initializes the image textures (right now it loads the same image into two different textures; eventually these will be the two sides of the 3D image, as soon as I figure out how to access the second image in the file).
FILENAME is #defined as the name of the image file I'm displaying
void Direct3D::InitGraphics()
{
D3DX11CreateShaderResourceViewFromFile(dev, // the Direct3D device
FILENAME, // load Wood.png in the local folder
NULL, // no additional information
NULL, // no multithreading
&pTextureLeftResourceView, // address of the shader-resource-view
NULL); // no multithreading
D3DX11CreateShaderResourceViewFromFile(dev, // the Direct3D device
FILENAME, // load Wood.png in the local folder
NULL, // no additional information
NULL, // no multithreading
&pTextureRightResourceView, // address of the shader-resource-view
NULL); // no multithreading
// get image size for rectangle mesh size
D3DX11_IMAGE_INFO info;
D3DX11GetImageInfoFromFile(FILENAME, NULL, &info, NULL);
FLOAT textureWidth = info.Width*0.001f;
FLOAT textureHeight = info.Height*0.001f;
// create vertices to represent the corners of the cube
VERTEX OurVertices[] =
{
{ -textureWidth, -textureHeight, 2.0f, D3DXCOLOR(1.0f, 1.0f, 1.0f, 1.0f), 1.0f, 1.0f },
{ textureWidth, -textureHeight, 2.0f, D3DXCOLOR(1.0f, 1.0f, 1.0f, 1.0f), 0.0f, 1.0f },
{ -textureWidth, textureHeight, 2.0f, D3DXCOLOR(1.0f, 1.0f, 1.0f, 1.0f), 1.0f, 0.0f },
{ textureWidth, textureHeight, 2.0f, D3DXCOLOR(1.0f, 1.0f, 1.0f, 1.0f), 0.0f, 0.0f }
};
// create the vertex buffer
D3D11_BUFFER_DESC bd;
ZeroMemory(&bd, sizeof(bd));
bd.Usage = D3D11_USAGE_DYNAMIC;
bd.ByteWidth = sizeof(VERTEX)* 4;
bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bd.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
dev->CreateBuffer(&bd, NULL, &pVBuffer);
// copy the vertices into the buffer
D3D11_MAPPED_SUBRESOURCE ms;
devcon->Map(pVBuffer, NULL, D3D11_MAP_WRITE_DISCARD, NULL, &ms); // map the buffer
memcpy(ms.pData, OurVertices, sizeof(OurVertices)); // copy the data
devcon->Unmap(pVBuffer, NULL);
// create the index buffer out of DWORDs
DWORD OurIndices[] =
{
0, 1, 2, // side 1
2, 1, 3,
};
// create the index buffer
bd.Usage = D3D11_USAGE_DYNAMIC;
bd.ByteWidth = sizeof(DWORD)* 6;
bd.BindFlags = D3D11_BIND_INDEX_BUFFER;
bd.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
bd.MiscFlags = 0;
dev->CreateBuffer(&bd, NULL, &pIBuffer);
devcon->Map(pIBuffer, NULL, D3D11_MAP_WRITE_DISCARD, NULL, &ms); // map the buffer
memcpy(ms.pData, OurIndices, sizeof(OurIndices)); // copy the data
devcon->Unmap(pIBuffer, NULL);
}
And just in case you need it, here is the initialization of the rendering pipeline.
void Direct3D::InitPipeline()
{
// compile the shaders
ID3D10Blob *VS_Primary, *PS_Primary, *VS_Distortion, *PS_Distortion;
D3DX11CompileFromFile("vs_primary.hlsl", 0, 0, "VShader", "vs_5_0", 0, 0, 0, &VS_Primary, 0, 0);
D3DX11CompileFromFile("ps_primary.hlsl", 0, 0, "PShader", "ps_5_0", 0, 0, 0, &PS_Primary, 0, 0);
D3DX11CompileFromFile("vs_distortion.hlsl", 0, 0, "VShader", "vs_5_0", 0, 0, 0, &VS_Distortion, 0, 0);
D3DX11CompileFromFile("ps_distortion.hlsl", 0, 0, "main", "ps_5_0", 0, 0, 0, &PS_Distortion, 0, 0);
// create the shader objects
dev->CreateVertexShader(VS_Primary->GetBufferPointer(), VS_Primary->GetBufferSize(), NULL, &pVS_Primary);
dev->CreatePixelShader(PS_Primary->GetBufferPointer(), PS_Primary->GetBufferSize(), NULL, &pPS_Primary);
dev->CreateVertexShader(VS_Distortion->GetBufferPointer(), VS_Distortion->GetBufferSize(), NULL, &pVS_Distortion);
dev->CreatePixelShader(PS_Distortion->GetBufferPointer(), PS_Distortion->GetBufferSize(), NULL, &pPS_Distortion);
// set the shader objects
devcon->VSSetShader(pVS_Primary, 0, 0);
devcon->PSSetShader(pPS_Primary, 0, 0);
// create the input element object
D3D11_INPUT_ELEMENT_DESC ied[] =
{
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "COLOR", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 28, D3D11_INPUT_PER_VERTEX_DATA, 0 },
};
// use the input element descriptions to create the input layout
dev->CreateInputLayout(ied, 3, VS_Primary->GetBufferPointer(), VS_Primary->GetBufferSize(), &pLayout_Primary);
devcon->IASetInputLayout(pLayout_Primary);
dev->CreateInputLayout(ied, 3, VS_Distortion->GetBufferPointer(), VS_Distortion->GetBufferSize(), &pLayout_Distortion);
devcon->IASetInputLayout(pLayout_Distortion);
// create the constant buffer
D3D11_BUFFER_DESC bd;
ZeroMemory(&bd, sizeof(bd));
bd.Usage = D3D11_USAGE_DEFAULT;
bd.ByteWidth = 64;
bd.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
dev->CreateBuffer(&bd, NULL, &pCBufferPrimaryShader);
devcon->VSSetConstantBuffers(0, 1, &pCBufferPrimaryShader);
ZeroMemory(&bd, sizeof(bd));
bd.Usage = D3D11_USAGE_DEFAULT;
bd.ByteWidth = 48;
bd.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
dev->CreateBuffer(&bd, NULL, &pCBufferDistortionShader);
}
Pixel Shader:
Texture2D Texture;
SamplerState ss;
float4 PShader(float4 color : COLOR, float2 texcoord : TEXCOORD0) : SV_TARGET
{
return color * Texture.Sample(ss, texcoord);
}
Vertex Shader:
cbuffer ConstantBuffer
{
float4x4 matFinal;
}
struct VOut
{
float4 color : COLOR;
float2 texcoord : TEXCOORD0;
float4 position : SV_POSITION;
};
VOut VShader(float4 position : POSITION, float4 color : COLOR, float2 texcoord : TEXCOORD0)
{
VOut output;
output.position = mul(matFinal, position);
output.color = color;
output.texcoord = texcoord;
return output;
}
From the following code, I don't see how you pass the texture from RenderLeft() to RenderRight(); you just pass the backbuffer to RenderRight().
RenderLeft(pTextureLeftRenderView, matFinalLeft, viewportLeft, true);
RenderRight(backbuffer, matFinalRight, viewportRight, false);
So the result is that the texture is rendered to the left viewport, and the right viewport only shows the color (green) of the backbuffer.
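One general D3D11 point worth keeping in mind with this kind of render-to-texture setup (it may or may not be related to the symptom here): a texture cannot be bound as a pixel-shader resource and as the render target at the same time, so when OMSetRenderTargets binds it as a render target the runtime silently nulls the conflicting SRV slot (the debug layer warns about this). It is common to clear the SRV slot explicitly before re-binding the texture as a render target; a sketch using the member names from the code above:
// Unbind the left render texture from the pixel shader before using it as a render target again.
ID3D11ShaderResourceView* nullSRV[1] = { nullptr };
devcon->PSSetShaderResources(0, 1, nullSRV);
devcon->OMSetRenderTargets(1, &pTextureLeftRenderView, zbuffer);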

How to convert XMMATRIX to D3DMATRIX in DirectX 9?

I'm learning DirectX (DirectX 9) from www.directxtutorial.com, using Visual Studio 2012 on Windows 8.
D3DX9 (d3dx) has been replaced by other headers such as DirectXMath, so I replaced everything that was needed, but there is one problem: converting an XMMATRIX to a D3DMATRIX.
The problem code (the offending lines are marked with /*problem!*/):
void render_frame(void) {
// clear the window to a deep blue
d3ddev->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 0), 1.0f, 0);
d3ddev->BeginScene(); // begins the 3D scene
// select which vertex format we are using
d3ddev->SetFVF(CUSTOMFVF);
// SET UP THE PIPELINE
DirectX::XMMATRIX matRotateY; // a matrix to store the rotation information
static float index = 0.0f; index+=0.05f; // an ever-increasing float value
// build a matrix to rotate the model based on the increasing float value
matRotateY = DirectX::XMMatrixRotationY(index);
D3DMATRIX D3DMatRotateY = matRotateY.r;
// tell Direct3D about our matrix
d3ddev->SetTransform(D3DTS_WORLD, &matRotateY); /*problem!*/
DirectX::XMMATRIX matView; // the view transform matrix
DirectX::XMVECTOR CameraPosition = {0.0f,0.0f,10.0f};
DirectX::XMVECTOR LookAtPosition = {0.0f,0.0f,0.0f};
DirectX::XMVECTOR TheUpDirection = {0.0f,1.0f,0.0f};
matView = DirectX::XMMatrixLookAtLH(CameraPosition, // the camera position
LookAtPosition, // the look-at position
TheUpDirection); // the up direction
d3ddev->SetTransform(D3DTS_VIEW, &matView); /*problem!*/ // set the view transform to matView
DirectX::XMMATRIX matProjection; // the projection transform matrix
DirectX::XMMatrixPerspectiveFovLH(&matProjection,
DirectX::XMConvertToRadians(45), // the horizontal field of view
1.0f, // the near view-plane
100.0f); // the far view-plane
d3ddev->SetTransform(D3DTS_PROJECTION, &matProjection); /*problem!*/ // set the projection
// select the vertex buffer to display
d3ddev->SetStreamSource(0, v_buffer, 0, sizeof(CUSTOMVERTEX));
// copy the vertex buffer to the back buffer
d3ddev->DrawPrimitive(D3DPT_TRIANGLELIST, 0, 1);
d3ddev->EndScene(); // ends the 3D scene
d3ddev->Present(NULL, NULL, NULL, NULL); /* displays the created frame on the screen */
}
You can use XMStoreFloat4x4 to convert an XMMATRIX to an XMFLOAT4X4.
You should then be able to pass the XMFLOAT4X4 to SetTransform with a cast, because XMFLOAT4X4 and D3DMATRIX have the same memory layout (16 row-major floats). Note that XMMatrixPerspectiveFovLH takes the vertical field of view, the aspect ratio and the near/far planes, and returns the matrix by value:
DirectX::XMMATRIX matProjection =
    DirectX::XMMatrixPerspectiveFovLH(DirectX::XMConvertToRadians(45.0f), // field of view (y direction)
                                      1.0f,    // aspect ratio (use back-buffer width / height)
                                      1.0f,    // the near view-plane
                                      100.0f); // the far view-plane
DirectX::XMFLOAT4X4 projectionMatrix;
DirectX::XMStoreFloat4x4(&projectionMatrix, matProjection);
d3ddev->SetTransform(D3DTS_PROJECTION, (D3DMATRIX*)&projectionMatrix); // set the projection
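The same store-then-cast pattern works for the other transforms; for example, the view matrix could be set like this (a sketch reusing the CameraPosition, LookAtPosition and TheUpDirection vectors from the question):
DirectX::XMMATRIX matView = DirectX::XMMatrixLookAtLH(CameraPosition,  // the camera position
                                                      LookAtPosition,  // the look-at position
                                                      TheUpDirection); // the up direction
DirectX::XMFLOAT4X4 viewMatrix;
DirectX::XMStoreFloat4x4(&viewMatrix, matView);
d3ddev->SetTransform(D3DTS_VIEW, (D3DMATRIX*)&viewMatrix); // set the view transform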