Having issues adding textures to my model, directx11 - c++

Okay so loading in a model works fine but when I try to add a texture or lighting they become stretched, I think it is having problems sharing vertices. I have tried changing the sampler_desc address as well as messing around with the input layout but no luck. The results
HRESULT Model::CreateTextureSampler(char* filename)
{
	// Load the texture file into a shader-resource view bound to this model.
	hr = D3DX11CreateShaderResourceViewFromFile(m_pD3DDevice, filename,
		NULL, NULL,
		&m_pTexture0, NULL);
	if (FAILED(hr)) // FAILED() is false for warnings, so warnings fall through
		return hr;

	// Describe a trilinear sampler that wraps in all three directions.
	D3D11_SAMPLER_DESC sampler_desc;
	ZeroMemory(&sampler_desc, sizeof(sampler_desc));
	sampler_desc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
	sampler_desc.AddressU = D3D11_TEXTURE_ADDRESS_WRAP;
	sampler_desc.AddressV = D3D11_TEXTURE_ADDRESS_WRAP;
	sampler_desc.AddressW = D3D11_TEXTURE_ADDRESS_WRAP;
	sampler_desc.MinLOD = 0;
	sampler_desc.MaxLOD = D3D11_FLOAT32_MAX;

	hr = m_pD3DDevice->CreateSamplerState(&sampler_desc, &m_pSampler0);
	if (FAILED(hr))
		return hr;

	// BUG FIX: the original fell off the end of a non-void function,
	// which is undefined behavior; report success explicitly.
	return S_OK;
}
HRESULT Model::CreateInputLayout()
{
	// Create and set the input layout object. Each element's format must
	// match both the vertex struct in the vertex buffer and the vertex
	// shader's input signature.
	D3D11_INPUT_ELEMENT_DESC m_iedesc[] =
	{
		{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT,    0, 0,                            D3D11_INPUT_PER_VERTEX_DATA, 0 },
		{ "COLOR",    0, DXGI_FORMAT_R32G32B32A32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
		// BUG FIX: texture coordinates for a 2D texture are a float2 (u,v),
		// not a float4. Declaring R32G32B32A32 here also shifted NORMAL's
		// appended offset by 8 bytes, so both UVs and normals read the
		// wrong bytes — the likely cause of the stretched texture/lighting.
		// (Assumes the vertex struct stores UVs as two floats — confirm.)
		{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT,       0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
		{ "NORMAL",   0, DXGI_FORMAT_R32G32B32_FLOAT,    0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 }
	};

	// ARRAYSIZE keeps the element count in sync if elements are added later.
	hr = m_pD3DDevice->CreateInputLayout(m_iedesc, ARRAYSIZE(m_iedesc),
		M_VS->GetBufferPointer(), M_VS->GetBufferSize(), &m_pInputLayout);
	if (FAILED(hr)) // FAILED() is false for warnings, so warnings fall through
		return hr;

	m_pImmediateContext->IASetInputLayout(m_pInputLayout);
	m_pImmediateContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
	return S_OK;
}

Looking at your code, I see this line here:
"TEXCOORD", 0, DXGI_FORMAT_R32G32B32A32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
You are passing DXGI_FORMAT_R32G32B32A32_FLOAT as the format (third field) of the texture-coordinate input element descriptor. You might want to check this value to see if it is causing your problem.
Normally texture coords are a vec2 object when using simple textures, unless you are using a 3D texture.
You might want to change this to DXGI_FORMAT_R32G32_FLOAT, and supply the appropriate [S,T] (or [P,Q]) texture coordinates per vertex. Textures are normally images stored linearly in memory but represent an MxN structure.

Related

DX11 vertices not drawn over a certain number

I'm trying to render a fullscreen tessellated mesh. It works as expected for up to around 250x250 vertices, but past that, it cuts off after the top half or so (depending on how tessellated I set the mesh).
I have a vertex and index vector, which I populate programmatically as a trianglelist. I suspect it might be related to the buffer size of the vertex buffer, since the number of vertices/indices seems right, but when I try to increase the bytewidth allocation, I get a memory access violation.
The vertex data struct defined as such:
// One mesh vertex: object-space position followed by 2D texture coordinates.
// Layout must stay in sync with the input layout below (POSITION at offset 0,
// TEXCOORD at offset 12) and with the stride passed to IASetVertexBuffers.
struct SimpleVertex
{
XMFLOAT3 Pos; // object-space position (12 bytes)
XMFLOAT2 Tex; // texture coordinates (8 bytes, at byte offset 12)
};
To initialize the buffers, I use:
D3D11_BUFFER_DESC bd;
bd.Usage = D3D11_USAGE_DEFAULT;
bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bd.CPUAccessFlags = 0;
...
D3D11_SUBRESOURCE_DATA InitData;
InitData.pSysMem = &(meshVertices[0]);
...
D3D11_INPUT_ELEMENT_DESC meshLayout[] =
{
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 },
};
Vertex buffer:
...
bd.ByteWidth = sizeof(SimpleVertex) * (UINT)(meshVertices.size());
...
hr = g_pd3dDevice->CreateBuffer(&bd, &InitData, &g_pMeshVertexBuffer);
Index buffer:
...
bd.ByteWidth = sizeof(WORD) * (UINT)(meshIndices.size());
...
hr = g_pd3dDevice->CreateBuffer(&bd, &InitData, &g_pMeshIndexBuffer);
Draw call:
// Binds the mesh's vertex/index buffers, shaders, and sampler, then issues
// one indexed draw of the full mesh.
void RenderMesh(){
// Render into the back buffer with depth testing.
g_pImmediateContext->OMSetRenderTargets(1, &g_pRenderTargetView, g_pDepthStencilView);
UINT stride = sizeof(SimpleVertex);
UINT offset = 0;
g_pImmediateContext->IASetVertexBuffers(0, 1, &g_pMeshVertexBuffer, &stride, &offset);
g_pImmediateContext->IASetInputLayout(g_pMeshVertexLayout);
// NOTE(review): DXGI_FORMAT_R16_UINT (matching the WORD indices used when
// the buffer was created) can only address 65536 distinct vertices — a
// likely cause of the mesh cutting off past ~250x250 vertices. Fixing it
// requires DWORD index data and DXGI_FORMAT_R32_UINT here; confirm against
// the index-buffer creation code.
g_pImmediateContext->IASetIndexBuffer(g_pMeshIndexBuffer, DXGI_FORMAT_R16_UINT, 0);
g_pImmediateContext->VSSetShader(g_pMeshVertexShader, nullptr, 0);
g_pImmediateContext->PSSetShader(g_pMeshPixelShader, nullptr, 0);
g_pImmediateContext->PSSetSamplers(0, 1, &g_pSamplerLinear);
// Draw every index once; numMeshIndices must match the index vector size.
g_pImmediateContext->DrawIndexed(numMeshIndices, 0, 0);
}
Any help would be greatly appreciated.

Input layout, access violation, error handling not working like i want it to

I'm learning DirectX 11, and I wanted to get my head around DirectX debugging because I got an access violation reading location error on line 199 (create inputLayout)
I am trying to get an error box with directX errors show up because I read somewhere that it is a good programming practice to have that box show up with information about errors
Any ideas?
Also, help with the input layout would be appreciated
ID3DBlob *VS, *PS;
// HR(x): evaluate the HRESULT expression x. In debug builds a failing call
// pops up a DXTrace message box identifying the file, line, and expression;
// in release builds the macro is just the expression itself.
// FIX: use logical || rather than bitwise | between defined() operators —
// same result here, but || is the correct idiom for boolean conditions.
#if defined(DEBUG) || defined(_DEBUG)
#ifndef HR
#define HR(x) \
{ \
	HRESULT hr = (x); \
	if(FAILED(hr)) \
	{ \
		DXTrace(__FILE__, (DWORD)__LINE__, hr, L#x, true); \
	} \
}
#endif
#else
#ifndef HR
#define HR(x) (x)
#endif
#endif
D3DX11CompileFromFile(L"shaders.fx", 0, 0, "VS", "vs_5_0", 0, 0, 0, &VS, 0, 0);
D3DX11CompileFromFile(L"shaders.fx", 0, 0, "PS", "ps_5_0", 0, 0, 0, &PS, 0, 0);
device->CreateVertexShader(VS->GetBufferPointer(), VS->GetBufferSize(), NULL, &vShader);
device->CreatePixelShader(PS->GetBufferPointer(), PS->GetBufferSize(), NULL, &pShader);
VS->Release();
PS->Release();
context->VSSetShader(vShader, 0, 0);
context->PSSetShader(pShader, 0, 0);
// define the input layout
D3D11_INPUT_ELEMENT_DESC layout[] =
{
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "COLOR", 0, DXGI_FORMAT_R32G32B32A32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 }
};
UINT numElements = ARRAYSIZE(layout);
//below gives me access violation error and says that &inputLayout is NULL
HR(device->CreateInputLayout(layout, numElements, VS->GetBufferPointer(), VS->GetBufferSize(), &inputLayout));
You are releasing the VS blob before you create the layout according to the code above. You need the original Vertex Shader binary blob at the time you create the Input Layout so that Direct3D can validate they match up.
A simple fix is to move VS->Release(); to after you call CreateInputLayout.
A better answer is to remove all explicit use of Release an instead rely on a smart-pointer like Microsoft::WRL::ComPtr.
#include <wrl/client.h>
using Microsoft::WRL::ComPtr;
...
ComPtr<ID3DBlob> VS, PS;
...
D3DX11CompileFromFile(L"shaders.fx", 0, 0, "VS", "vs_5_0", 0, 0, 0, &VS, 0, 0);
D3DX11CompileFromFile(L"shaders.fx", 0, 0, "PS", "ps_5_0", 0, 0, 0, &PS, 0, 0);
device->CreateVertexShader(VS->GetBufferPointer(), VS->GetBufferSize(), NULL, &vShader);
device->CreatePixelShader(PS->GetBufferPointer(), PS->GetBufferSize(), NULL, &pShader);
context->VSSetShader(vShader, 0, 0);
context->PSSetShader(pShader, 0, 0);
...
//below gives me access violation error and says that &inputLayout is NULL
HR(device->CreateInputLayout(layout, numElements, VS->GetBufferPointer(), VS->GetBufferSize(), &inputLayout));
Whenever VS and PS go out of scope, they will take care of cleaning themselves up.

DX11 C++ Shader can't receive Instance Buffer content

i would like to draw Instances of an obj File. After i implemented the Instancing instead of drawing each Object by his own draw() function (which worked just fine), the Instances are not positioned correctly. Probably the data from the InstanceBuffer is not set in the shader correctly.
D3DMain.cpp - creating input layout
// Per-instance data streamed through vertex-buffer slot 1.
// BUG FIX: the original declared a bare `D3DXVECTOR3` with no member name
// (which does not compile), while the buffer-fill code assigns
// `insertInstance.matTrans` — so name the member accordingly.
struct INSTANCE {
	//D3DXMATRIX matTrans; // full per-instance transform (currently disabled)
	D3DXVECTOR3 matTrans;  // per-instance translation consumed as INSTTRANS
};
/***/
// create the input layout object
D3D11_INPUT_ELEMENT_DESC ied[] =
{
//vertex buffer
{"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0},
//instance buffer
{"INSTTRANS", 0, DXGI_FORMAT_R32G32B32_FLOAT, 1, 0, D3D11_INPUT_PER_INSTANCE_DATA, 1},
//{"INSTTRANS", 1, DXGI_FORMAT_R32G32B32A32_FLOAT, 1, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_INSTANCE_DATA, 1},
//{"INSTTRANS", 2, DXGI_FORMAT_R32G32B32A32_FLOAT, 1, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_INSTANCE_DATA, 1},
//{"INSTTRANS", 3, DXGI_FORMAT_R32G32B32A32_FLOAT, 1, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_INSTANCE_DATA, 1},
};
if (FAILED(d3ddev->CreateInputLayout(ied, 4, VS->GetBufferPointer(), VS->GetBufferSize(), &pLayout))) throw(std::string("Input Layout Creation Error"));
d3ddevcon->IASetInputLayout(pLayout);
World.cpp - setting up instance buffer
std::vector<INSTANCE> instanceBuffer;
INSTANCE insertInstance;
D3DXMATRIX scaleMat, transMat;
D3DXMatrixScaling(&scaleMat, 50.0f, 50.0f, 50.0f);
int i=0;
for (std::list<SINSTANCES>::iterator it = sInstances.begin(); it != sInstances.end(); it++) {
if ((*it).TypeID == typeId) {
//do something
D3DXMatrixTranslation(&transMat, (*it).pos.x, (*it).pos.y, (*it).pos.z);
insertInstance.matTrans = (*it).pos;//scaleMat * transMat;
instanceBuffer.push_back(insertInstance);
i++;
}
}
instanceCount[typeId] = i;
//create new IB
D3D11_BUFFER_DESC instanceBufferDesc;
ZeroMemory(&instanceBufferDesc, sizeof(instanceBufferDesc));
instanceBufferDesc.Usage = D3D11_USAGE_DEFAULT;
instanceBufferDesc.ByteWidth = sizeof(INSTANCE) * i;
instanceBufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
instanceBufferDesc.CPUAccessFlags = 0;
instanceBufferDesc.MiscFlags = 0;
D3D11_SUBRESOURCE_DATA instanceData;
ZeroMemory(&instanceData, sizeof(instanceData));
instanceData.pSysMem = &instanceBuffer[0];
if (FAILED(d3ddev->CreateBuffer(&instanceBufferDesc, &instanceData, &instanceBufferMap[typeId]))) throw(std::string("Failed to Update Instance Buffer"));
OpenDrawObj.cpp - drawing .obj file
UINT stride[2] = {sizeof(VERTEX), sizeof(INSTANCE)};
UINT offset[2] = {0, 0};
ID3D11Buffer* combinedBuffer[2] = {meshVertBuff, instanceBuffer};
d3ddevcon->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
d3ddevcon->IASetVertexBuffers(0, 2, combinedBuffer, stride, offset);
d3ddevcon->IASetIndexBuffer(meshIndexBuff, DXGI_FORMAT_R32_UINT, 0);
std::map<std::wstring, OBJMATERIAL>::iterator fit;
for (std::vector<DRAWLIST>::iterator it = drawList.begin(); it != drawList.end(); it++) {
fit = objMaterials.find((*it).material);
if (fit != objMaterials.end()) {
if ((*fit).second.texture != NULL) {
d3ddevcon->PSSetShaderResources(0, 1, &((*fit).second.texture));
}
d3ddevcon->DrawIndexedInstanced((*it).indexCount, instanceCount, (*it).startIndex, 0, 0);
}
}
the drawing function (above) is called here: I pass the instance buffer (map(int, ID3D11Buffer*) and the instance numbers)
(*it).second->draw(0.0f, 0.0f, 0.0f, 0, instanceBufferMap[typeId], instanceCount[typeId]);
shader.hlsl
// Vertex-shader input: per-vertex attributes (slot 0) plus per-instance
// data (slot 1), as laid out by the D3D11_INPUT_ELEMENT_DESC array.
struct VIn
{
float4 position : POSITION;
float3 normal : NORMAL;
float2 texcoord : TEXCOORD;
//row_major float4x4 instTrans : INSTTRANS;
// NOTE(review): the input layout feeds INSTTRANS as DXGI_FORMAT_R32G32B32_FLOAT
// (a float3), but this declares float4 — a format/declaration mismatch that
// could explain the "random data" seen in instTrans; confirm they agree.
float4 instTrans : INSTTRANS;
uint instanceID : SV_InstanceID;
};
VOut VShader(VIn input)
{
VOut output;
//first: transforming instance
//output.position = mul(input.instTrans, input.position);
output.position = input.position;
output.position.xyz *= 50.0; //scale
output.position.z += input.instTrans.z; //apply only z value
float4 transPos = mul(world, output.position); //transform position with world matrix
output.position = mul(view, transPos); //project to screen
the "input.instTrans" in the last file is incorrect and contains ramdom data.
Do you have any ideas?
So I found the bug; it was at a totally unexpected location...
So here is the code snippet:
ID3D10Blob *VS, *VS2, *PS, *PS2; //<- i only used VS and PS before
//volume shader
if (FAILED(D3DX11CompileFromFile(L"resources/volume.hlsl", 0, 0, "VShader", "vs_5_0", D3D10_SHADER_PREFER_FLOW_CONTROL | D3D10_SHADER_SKIP_OPTIMIZATION, 0, 0, &VS, 0, 0))) throw(std::string("Volume Shader Error 1"));
if (FAILED(D3DX11CompileFromFile(L"resources/volume.hlsl", 0, 0, "PShader", "ps_5_0", D3D10_SHADER_PREFER_FLOW_CONTROL | D3D10_SHADER_SKIP_OPTIMIZATION, 0, 0, &PS, 0, 0))) throw(std::string("Volume Shader Error 2"));
// encapsulate both shaders into shader objects
if (FAILED(d3ddev->CreateVertexShader(VS->GetBufferPointer(), VS->GetBufferSize(), NULL, &pvolumeVS))) throw(std::string("Volume Shader Error 1A"));
if (FAILED(d3ddev->CreatePixelShader(PS->GetBufferPointer(), PS->GetBufferSize(), NULL, &pvolumePS))) throw(std::string("Volume Shader Error 2A"));
//sky shader
if (FAILED(D3DX11CompileFromFile(L"resources/sky.hlsl", 0, 0, "VShader", "vs_5_0", D3D10_SHADER_OPTIMIZATION_LEVEL3, 0, 0, &VS2, 0, 0))) throw(std::string("Sky Shader Error 1"));
if (FAILED(D3DX11CompileFromFile(L"resources/sky.hlsl", 0, 0, "PShader", "ps_5_0", D3D10_SHADER_OPTIMIZATION_LEVEL3, 0, 0, &PS2, 0, 0))) throw(std::string("Sky Shader Error 2"));
// encapsulate both shaders into shader objects
if (FAILED(d3ddev->CreateVertexShader(VS2->GetBufferPointer(), VS2->GetBufferSize(), NULL, &pskyVS))) throw(std::string("Sky Shader Error 1A"));
if (FAILED(d3ddev->CreatePixelShader(PS2->GetBufferPointer(), PS2->GetBufferSize(), NULL, &pskyPS))) throw(std::string("Sky Shader Error 2A"));
Using two buffers for compiling the shaders solved the problem, though i have no idea why. Thank you for the support, though ;)

Drawing a rectangle in Direct X

Using the tutorial here, I have managed to get a red triangle up on my screen: http://www.directxtutorial.com/Lesson.aspx?lessonid=9-4-4
CUSTOMVERTEX OurVertices[] =
{
{ 0, 0, 0, 1.0f, D3DCOLOR_XRGB( 127, 0, 0 ) },
{ WIDTH, 0, 0, 1.0f, D3DCOLOR_XRGB( 127, 0, 0 ) },
{ 0, 300, 0, 1.0f, D3DCOLOR_XRGB( 127, 0, 0 ) },
{ WIDTH, 300, 0, 1.0f, D3DCOLOR_XRGB( 127, 0, 0 ) }
};
d3dDevice->CreateVertexBuffer(3*sizeof(CUSTOMVERTEX),
0,
CUSTOMFVF,
D3DPOOL_MANAGED,
&vBuffer,
NULL);
VOID* pVoid; // the void* we were talking about
vBuffer->Lock(0, 0, (void**)&pVoid, 0); // locks v_buffer, the buffer we made earlier
memcpy(pVoid, OurVertices, sizeof(OurVertices)); // copy vertices to the vertex buffer
vBuffer->Unlock(); // unlock v_buffer
d3dDevice->SetFVF(CUSTOMFVF);
d3dDevice->SetStreamSource(0, vBuffer, 0, sizeof(CUSTOMVERTEX));
d3dDevice->DrawPrimitive(D3DPT_TRIANGLELIST, 0, 1);
But you can see that I really want to be drawing a rectangle.
I have changed the Primitive to draw 2 triangles and extended the buffer size to 4*size of my custom vertex but I can't really say I understand how to get it from my triangle to my rectangle I would like:
Is there a better way of drawing a rectangle rather than using a quad considering I just want to sling some text on top of it something like this:
http://1.bp.blogspot.com/-6HjFVnrVM94/TgRq8oP4U-I/AAAAAAAAAKk/i8N0OZU999E/s1600/monkey_island_screen.jpg
I had to exend my buffer to allow for 4 vertex array size:
d3dDevice->CreateVertexBuffer(4*sizeof(CUSTOMVERTEX),
0,
CUSTOMFVF,
D3DPOOL_MANAGED,
&vBuffer,
NULL);
And then changed the draw primitive from TRIANGLELIST to STRIP extending the amount of triangles drawn to 2
d3dDevice->DrawPrimitive (D3DPT_TRIANGLESTRIP, 0, 2 );
Source: http://www.mdxinfo.com/tutorials/tutorial4.php

Texture fail to render - DirectX 9

I have this code in DirectX 9, in which I need to get the DC from a COM interface and draw it. I do get the DC and it contains the image, but I get a black screen. Any ideas why?
LPDIRECT3DSURFACE9 pRenderSurface = NULL, pRenderSurfaceTMP = NULL;
m_pRenderTexture->GetSurfaceLevel(0, &pRenderSurface);
if (pRenderSurface == NULL)
return FALSE;
m_pD3DDevice->CreateOffscreenPlainSurface(m_nWidth, m_nHeight,
D3DFMT_X8R8G8B8, D3DPOOL_SYSTEMMEM, &pRenderSurfaceTMP, 0);
m_pD3DDevice->GetRenderTargetData(pRenderSurface,pRenderSurfaceTMP);
HDC hDC = NULL;
hr = pRenderSurfaceTMP->GetDC(&hDC);
if (FAILED(hr))
return FALSE;
if (m_pViewObject != NULL)
{
// RECT is relative to the windowless container rect
RECTL rcRect = {0, 0, m_nWidth, m_nHeight};
// Draw onto the DC!
hr = m_pViewObject->Draw(DVASPECT_CONTENT, 1,
NULL, NULL, NULL, hDC, &rcRect, NULL, NULL,
0);
}
pRenderSurface->ReleaseDC(hDC);
pRenderSurface->Release();
// Draw the surface
m_pD3DDevice->SetStreamSource( 0, m_pVertexBuffer, 0, sizeof(Vertex) );
m_pD3DDevice->SetTexture( 0, m_pRenderTexture );
hr = m_pD3DDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
m_hbrBackground = NULL;
pRenderSurfaceTMP->Release();
Thanks in advance
Make sure that m_pRenderTexture is created with D3DUSAGE_RENDERTARGET. The size and format of both pRenderSurface and pRenderSurfaceTMP should match. For more info, see when GetRenderTargetData fails: http://msdn.microsoft.com/en-us/library/windows/desktop/bb174405(v=vs.85).aspx
Try this:
m_pD3DDevice->CreateTexture(m_nWidth, m_nHeight, 1, 0, D3DFMT_X8R8G8B8,
D3DPOOL_MANAGED, &m_pRenderTexture, NULL);
// ...
LPDIRECT3DSURFACE9 pRenderSurface = NULL;
if (FAILED(m_pRenderTexture->GetSurfaceLevel(0, &pRenderSurface))) return FALSE;
HDC hDC = NULL;
if (FAILED(pRenderSurface->GetDC(&hDC))) return FALSE;
if (m_pViewObject != NULL)
{
RECTL rcRect = {0, 0, m_nWidth, m_nHeight};
m_pViewObject->Draw(DVASPECT_CONTENT, 1,
NULL, NULL, NULL, hDC, &rcRect, NULL, NULL, 0);
}
pRenderSurface->ReleaseDC(hDC);
pRenderSurface->Release();
m_pD3DDevice->SetStreamSource( 0, m_pVertexBuffer, 0, sizeof(Vertex) );
m_pD3DDevice->SetTexture( 0, m_pRenderTexture );
m_pD3DDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
or this:
m_pD3DDevice->CreateTexture(m_nWidth, m_nHeight, 1, 0, D3DFMT_X8R8G8B8,
D3DPOOL_DEFAULT, &m_pRenderTexture, NULL);
// ...
LPDIRECT3DSURFACE9 pRenderSurfaceTMP = NULL;
m_pD3DDevice->CreateOffscreenPlainSurface(m_nWidth, m_nHeight,
D3DFMT_X8R8G8B8, D3DPOOL_SYSTEMMEM, &pRenderSurfaceTMP, 0);
HDC hDC = NULL;
if (FAILED(pRenderSurfaceTMP->GetDC(&hDC))) return FALSE;
if (m_pViewObject != NULL)
{
RECTL rcRect = {0, 0, m_nWidth, m_nHeight};
m_pViewObject->Draw(DVASPECT_CONTENT, 1,
NULL, NULL, NULL, hDC, &rcRect, NULL, NULL, 0);
}
pRenderSurfaceTMP->ReleaseDC(hDC);
LPDIRECT3DSURFACE9 pRenderSurface = NULL;
if (FAILED(m_pRenderTexture->GetSurfaceLevel(0, &pRenderSurface)))
return FALSE;
if (FAILED(m_pD3DDevice->UpdateSurface(pRenderSurfaceTMP, NULL,
pRenderSurface, NULL))) return FALSE;
pRenderSurface->Release();
m_pD3DDevice->SetStreamSource( 0, m_pVertexBuffer, 0, sizeof(Vertex) );
m_pD3DDevice->SetTexture( 0, m_pRenderTexture );
m_pD3DDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);