I'm trying to make a color picker system, and for this I need a color gradient.
I'm using DrawPrimitive to draw a quad with per-vertex colors, but the result is not what I wanted: the gradient isn't as pretty as the one other programs produce.
My Gradient:
The gradient Photoshop/ImGui/other programs seem to produce:
void GradientColor(float x, float y, float width, float height)
{
struct D3DVERTEX
{
float x, y, z, rhw;
DWORD color;
};
D3DVERTEX vertices[4];
vertices[0].x = x;
vertices[0].y = y;
vertices[0].z = 0.f;
vertices[0].rhw = 1.f;
vertices[0].color = 0xffffffff; //White
vertices[1].x = x + width;
vertices[1].y = y;
vertices[1].z = 0.f;
vertices[1].rhw = 1.f;
vertices[1].color = 0xffff0000; //Red
vertices[2].x = x;
vertices[2].y = y + height;
vertices[2].z = 0.f;
vertices[2].rhw = 1.f;
vertices[2].color = 0xff000000; //Black
vertices[3].x = x + width;
vertices[3].y = y + height;
vertices[3].z = 0.f;
vertices[3].rhw = 1.f;
vertices[3].color = 0xff000000; //Black
static LPDIRECT3DVERTEXBUFFER9 pVertexObject = NULL;
static void *pVertexBuffer = NULL;
if (!pVertexObject) {
if (FAILED(device->CreateVertexBuffer(sizeof(vertices), 0,
D3DFVF_XYZRHW | D3DFVF_DIFFUSE, D3DPOOL_DEFAULT, &pVertexObject, NULL)))
return;
}
if (FAILED(pVertexObject->Lock(0, sizeof(vertices), &pVertexBuffer, 0)))
return;
memcpy(pVertexBuffer, vertices, sizeof(vertices));
pVertexObject->Unlock();
device->SetStreamSource(0, pVertexObject, 0, sizeof(D3DVERTEX));
device->SetFVF(D3DFVF_XYZRHW | D3DFVF_DIFFUSE);
device->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
}
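For context, this is roughly how I call it each frame (the device setup and render loop are assumed and not shown; device is the same LPDIRECT3DDEVICE9 used inside the function):
device->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(30, 30, 30), 1.0f, 0);
device->BeginScene();
GradientColor(10.0f, 10.0f, 256.0f, 256.0f); // draw the gradient quad
device->EndScene();
device->Present(NULL, NULL, NULL, NULL);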
I know the difference is not big, but it's perceptible. Thanks for any help.
My problem is a bit extensive. I'm trying to draw a sprite using DirectX 11, and to handle scaling and rotation I use a transformation matrix. I extracted this code from SFML, an open-source library, and translation works fine. However, rotation and scaling aren't working as expected, as I show next.
When I rotate around the center, everything is correct.
When I move the sprite and then rotate it, the sprite rotates around an "unknown" point.
The same happens when I scale the sprite: it scales from an "unknown" point.
If I rotate the sprite, the translation axes stay the same but the sprite is rotated, so I can't move it properly.
Here is a short video of exactly what's going on. [VIDEO]
I think the problem is related to how I transform the sprite, but I can't be sure. Below are the code parts I think are involved in this error, but you can also take a look at the GitHub project for a deeper look. [GITHUB PROJECT]
Transformable.cpp
const Transform& Transformable::GetTransform() const
{
if (_transformNeedUpdate)
{
float angle = -_rotation * 3.141592654f / 180.f;
float cosine = static_cast<float>(std::cos(angle));
float sine = static_cast<float>(std::sin(angle));
float sxc = _scale.x * cosine;
float syc = _scale.y * cosine;
float sxs = _scale.x * sine;
float sys = _scale.y * sine;
float tx = -_origin.x * sxc - _origin.y * sys + _position.x;
float ty = _origin.x * sxs - _origin.y * syc + _position.y;
_transform = Transform(sxc, sys, tx,
-sxs, syc, ty,
0.f, 0.f, 1.f);
_transformNeedUpdate = false;
}
return _transform;
}
Transform.cpp
Transform::Transform(
float a00, float a01, float a02,
float a10, float a11, float a12,
float a20, float a21, float a22
)
{
_matrix[0] = a00; _matrix[4] = a01; _matrix[8] = 0.f; _matrix[12] = a02;
_matrix[1] = a10; _matrix[5] = a11; _matrix[9] = 0.f; _matrix[13] = a12;
_matrix[2] = 0.f; _matrix[6] = 0.f; _matrix[10] = 1.f; _matrix[14] = 0.f;
_matrix[3] = a20; _matrix[7] = a21; _matrix[11] = 0.f; _matrix[15] = a22;
}
D3DXVECTOR2 Transform::TransformPoint(float x, float y) const
{
return D3DXVECTOR2(_matrix[0] * x + _matrix[4] * y + _matrix[12],
_matrix[1] * x + _matrix[5] * y + _matrix[13]);
}
D3DXVECTOR2 operator *(const Transform& left, const D3DXVECTOR2& right)
{
return left.TransformPoint(right.x, right.y);
}
Bitmap.cpp (where I set up the vertices to be drawn)
HRESULT Bitmap::UpdateBuffers(ID3D11DeviceContext* deviceContext)
{
if (_transform == _previousTransform && _bounds == _previousBounds)
{
return S_OK;
}
VertexType* vertices;
D3D11_MAPPED_SUBRESOURCE mappedResource;
VertexType* verticesPtr;
HRESULT result;
_previousTransform = _transform;
_previousBounds = _bounds;
vertices = new VertexType[_vertexCount];
if (!vertices)
{
return CO_E_ERRORINAPP;
}
float left = _bounds.left();
float right = left + _bounds.width();
float top = _bounds.top();
float bottom = top + _bounds.height();
D3DXVECTOR2 topLeft = { left, top };
D3DXVECTOR2 bottomRight = { right, bottom };
D3DXVECTOR2 topRight = { right, top };
D3DXVECTOR2 bottomLeft = { left, bottom };
topLeft = _transform * topLeft;
bottomRight = _transform * bottomRight;
topRight = _transform * topRight;
bottomLeft = _transform * bottomLeft;
vertices[0].position = D3DXVECTOR3(topLeft.x, topLeft.y, 0.0f);
vertices[0].texture = D3DXVECTOR2(0.0f, 0.0f);
vertices[1].position = D3DXVECTOR3(bottomRight.x, bottomRight.y, 0.0f);
vertices[1].texture = D3DXVECTOR2(1.0f, 1.0f);
vertices[2].position = D3DXVECTOR3(bottomLeft.x, bottomLeft.y, 0.0f);
vertices[2].texture = D3DXVECTOR2(0.0f, 1.0f);
vertices[3].position = D3DXVECTOR3(topLeft.x, topLeft.y, 0.0f);
vertices[3].texture = D3DXVECTOR2(0.0f, 0.0f);
vertices[4].position = D3DXVECTOR3(topRight.x, topRight.y, 0.0f);
vertices[4].texture = D3DXVECTOR2(1.0f, 0.0f);
vertices[5].position = D3DXVECTOR3(bottomRight.x, bottomRight.y, 0.0f);
vertices[5].texture = D3DXVECTOR2(1.0f, 1.0f);
result = deviceContext->Map(_vertexBuffer, 0, D3D11_MAP_WRITE_DISCARD, 0, &mappedResource);
if (FAILED(result))
{
return CO_E_ERRORINAPP;
}
verticesPtr = (VertexType*)mappedResource.pData;
memcpy(verticesPtr, (void*)vertices, (sizeof(VertexType) * _vertexCount));
deviceContext->Unmap(_vertexBuffer, 0);
delete[] vertices;
vertices = 0;
return S_OK;
}
The coordinate system is as shown in the image below; the center of the screen is the (0,0) position.
The code flow
When the position, scale, rotation or origin of the actor changes, a new Transform is generated. Then I transform the local bounds of the actor with the generated transform to get the correct positions of the vertices, and I write them into the vertex buffer.
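Roughly, the flow looks like this (a sketch only; SetPosition and SetTransform stand in for whichever setters are actually used, and deviceContext is the usual ID3D11DeviceContext):
actor.SetPosition(newPosition);            // marks _transformNeedUpdate
const Transform& t = actor.GetTransform(); // lazily rebuilds the matrix
bitmap.SetTransform(t);                    // Bitmap stores it in _transform
bitmap.UpdateBuffers(deviceContext);       // transforms the bounds corners and re-maps the vertex buffer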
Today, for some reason, FreeType stopped working for me. Everything worked fine yesterday, and I haven't even touched the code. Instead of letters, plain quads in the specified text color are being rendered.
I have already checked whether it's a blending problem.
Code for loading the glyphs:
// This is the relative path to the font,
// later used to load the font.
std::string strFontPath = "fonts/" + strFont + ".ttf";
// Initialising Freetype Library
FT_Library _ft;
FT_Init_FreeType(&_ft);
// Loading font from the file.
FT_Face _face;
if (FT_New_Face(_ft, strFontPath.c_str(), 0, &_face))
EXIT_ERROR(-11);
FT_Set_Pixel_Sizes(_face, 0, 48);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
// When I used char the program misbehaved, so I'm using
// unsigned short int and converting the type.
for (unsigned short int usiCurrentCharacter = 30; usiCurrentCharacter < 128; usiCurrentCharacter++)
{
// Loading char to the face, fails when char fails to load.
if (FT_Load_Glyph(_face, FT_Get_Char_Index(_face,(char)usiCurrentCharacter), FT_LOAD_RENDER))
{
EXIT_ERROR(-12);
}
std::cout << (char)usiCurrentCharacter << _face->glyph->bitmap.width << " " << _face->glyph->bitmap.rows << std::endl;
// Create New Font
FontTexture* _char = new FontTexture(_face->glyph->bitmap.buffer,
0,
{ static_cast<float>(_face->glyph->bitmap.width),
static_cast<float>(_face->glyph->bitmap.rows) });
_char->SetAdvance(_face->glyph->advance.x);
_char->SetBearing({ static_cast<float>(_face->glyph->bitmap_left),
static_cast<float>(_face->glyph->bitmap_top) });
TextureManager::getTextureManager().PrecacheTexture(std::to_string(usiCurrentCharacter) + strFont, _char);
Characters.insert(std::pair<char, FontTexture*>(char(usiCurrentCharacter), _char));
}
FT_Done_Face(_face);
FT_Done_FreeType(_ft);
Here is my renderer code as well:
shader->Bind();
shader->SetUniform3f("u_TextureColor", color.x, color.y, color.z);
//shader->SetUniform2f("u_Position", pos.x, pos.y); //We have to calculate array on cpu anyway :P
shader->SetUniform1i("u_Texture", 1);
//Iterate through all characters
for (char c : text)
{
FontTexture* _character = font->getFontTexture(c);
float xpos = pos.x + _character->GetBearing().x * scale;
float ypos = pos.y - (_character->GetSize().y - _character->GetBearing().y) * scale;
float w = _character->GetSize().x * scale;
float h = _character->GetSize().y * scale;
vertices[0] = xpos; vertices[1] = ypos + h; vertices[2] = 0.0; vertices[3] = 0.0;
vertices[4] = xpos; vertices[5] = ypos; vertices[6] = 0.0; vertices[7] = 1.0;
vertices[8] = xpos + w; vertices[9] = ypos; vertices[10] = 1.0; vertices[11] = 1.0;
vertices[12] = xpos; vertices[13] = ypos + h; vertices[14] = 0.0; vertices[15] = 0.0;
vertices[16] = xpos + w;vertices[17] = ypos; vertices[18] = 1.0; vertices[19] = 1.0;
vertices[20] = xpos + w;vertices[21] = ypos + h; vertices[22] = 1.0; vertices[23] = 0.0;
//_character->Bind(1);
shader->Bind();
va->Bind();
vb->Bind(vertices, sizeof(vertices));
Renderer::getRenderer().DrawArrays(*va, *vb, vertices);
//_character->Unbind();
pos.x += (_character->GetAdvance() >> 6) * scale;
}
This is the result I got yesterday:
This is what I'm getting today:
I've just noticed this line:
//_character->Bind(1);
It meant the texture wasn't bound before drawing.
Uncommenting it fixed the problem.
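For reference, a minimal sketch of the fixed part of the draw loop, with the bind/unbind calls restored (everything else is unchanged from the renderer code above; Bind(1) matches the shader's u_Texture sampler, which is set to unit 1):
FontTexture* _character = font->getFontTexture(c);
// ... xpos/ypos/w/h and the vertices array are filled exactly as above ...
_character->Bind(1); // bind the glyph texture before drawing
shader->Bind();
va->Bind();
vb->Bind(vertices, sizeof(vertices));
Renderer::getRenderer().DrawArrays(*va, *vb, vertices);
_character->Unbind();
pos.x += (_character->GetAdvance() >> 6) * scale;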
Can anyone tell me how to use
GL11.glReadPixels()
in LWJGL to get the z depth of the ray cast from the mouse?
I can get the x and y of the view before transforming it into a ray:
float x = Mouse.getX();
float y = Mouse.getY();
but I don't know how to use glReadPixels; when I use it, it doesn't seem to make any difference, and both calculateMousePoint and calculateMouseRay give the same result.
public static float getZDepth(int x, int y)
{
ByteBuffer zdepth = allocBytes(SIZE_FLOAT);
GL11.glReadPixels(x, y, 1, 1, GL11.GL_DEPTH_COMPONENT, GL11.GL_FLOAT, zdepth);
return ( (float) (zdepth.getFloat(0)));
}
private Vector3f calculateMouseRay() {
float mouseX = Mouse.getX();
float mouseY = Mouse.getY();
Vector2f normalizedCoords = getNormalisedDeviceCoordinates(mouseX, mouseY);
Vector4f clipCoords = new Vector4f(normalizedCoords.x, normalizedCoords.y, -1.0f, 1.0f);
Vector4f eyeCoords = toEyeCoords(clipCoords);
Vector3f worldRay = toWorldCoords(eyeCoords);
return worldRay;
}
private Vector3f calculateMousePoint() {
float mouseX = Mouse.getX();
float mouseY = Mouse.getY();
float mouseZ = getZDepth((int)mouseX,(int) mouseY);
Vector2f normalizedCoords = getNormalisedDeviceCoordinates(mouseX, mouseY);
Vector4f clipCoords = new Vector4f(normalizedCoords.x, normalizedCoords.y, mouseZ, 1.0f);
Vector4f eyeCoords = toEyeCoords2(clipCoords);
Vector3f worldRay = toWorldCoords(eyeCoords);
return worldRay;
}
private Vector3f toWorldCoords(Vector4f eyeCoords) {
Matrix4f invertedView = Matrix4f.invert(viewMatrix, null);
Vector4f rayWorld = Matrix4f.transform(invertedView, eyeCoords, null);
Vector3f mouseRay = new Vector3f(rayWorld.x, rayWorld.y, rayWorld.z);
mouseRay.normalise();
return mouseRay;
}
private Vector4f toEyeCoords(Vector4f clipCoords) {
Matrix4f invertedProjection = Matrix4f.invert(projectionMatrix, null);
Vector4f eyeCoords = Matrix4f.transform(invertedProjection, clipCoords, null);
return new Vector4f(eyeCoords.x, eyeCoords.y, -1f, 0f);
}
private Vector4f toEyeCoords2(Vector4f clipCoords) {
Matrix4f invertedProjection = Matrix4f.invert(projectionMatrix, null);
Vector4f eyeCoords = Matrix4f.transform(invertedProjection, clipCoords, null);
return new Vector4f(eyeCoords.x, eyeCoords.y, eyeCoords.z, 0f);
}
private Vector2f getNormalisedDeviceCoordinates(float mouseX, float mouseY) {
float x = (2.0f * mouseX) / Display.getWidth() - 1f;
float y = (2.0f * mouseY) / Display.getHeight() - 1f;
return new Vector2f(x, y);
}
From the documentation, x, y and width, height specify the area to read. type is the data type, and data receives the result. Finally, the most important thing here is the format parameter: you can select what you want to retrieve. For you, it will be GL_DEPTH_COMPONENT:
FloatBuffer zmouse = BufferUtils.createFloatBuffer(1);
GL11.glReadPixels(xmouse, ymouse, 1, 1, GL11.GL_DEPTH_COMPONENT, GL11.GL_FLOAT, zmouse);
float z = zmouse.get(0);
You have the Z depth; now you have to convert it into the right space. Right now it's in clip space, and I think you want the camera-space one. So you have to multiply the "mouse" point by the inverse of the projection and view matrices, something like realPoint = inverse(projection * view * model) * (xmouse, ymouse, zmouse).
Finally, realPoint is the point in 3D space.
Using your code as an example, this should do the job:
public static float getZDepth(int x, int y)
{
ByteBuffer zdepth = allocBytes(SIZE_FLOAT);
GL11.glReadPixels(x, y, 1, 1, GL11.GL_DEPTH_COMPONENT, GL11.GL_FLOAT, zdepth);
return ( (float) (zdepth.getFloat(0)));
}
private Vector3f calculateMousePoint(Vector3f point) {
float x = Mouse.getX();
float y = Mouse.getY();
float z = getZDepth((int) x, (int) y);
return project(new Vector3f(x,y,z), new Vector4f(0,0,Display.getWidth(), Display.getHeight()));
}
private Vector3f calculateFarPoint(Vector3f point) {
float x = Mouse.getX();
float y = Mouse.getY();
return project(new Vector3f(x, y, 1.0f), new Vector4f(0, 0, Display.getWidth(), Display.getHeight()));
}
// Code translated from GLM_GTC_matrix_transform
//(https://glm.g-truc.net/0.9.2/api/a00245.html#gac38d611231b15799a0c06c54ff1ede43)
private Vector3f project(Vector3f point, Vector4f viewport)
{
// Invert (projection * view) to go from clip space back to 3D space.
Matrix4f projView = Matrix4f.mul(projectionMatrix, viewMatrix, null);
Matrix4f inverse = Matrix4f.invert(projView, null);
Vector4f tmp = new Vector4f(point.x, point.y, point.z, 1.0f);
// Window coordinates -> normalized device coordinates ([0, 1] -> [-1, 1]).
tmp.x = ((tmp.x - viewport.x) / viewport.z) * 2.0f - 1.0f;
tmp.y = ((tmp.y - viewport.y) / viewport.w) * 2.0f - 1.0f;
tmp.z = tmp.z * 2.0f - 1.0f;
// Unproject and apply the perspective divide.
Vector4f obj = Matrix4f.transform(inverse, tmp, null);
obj.x /= obj.w;
obj.y /= obj.w;
obj.z /= obj.w;
return new Vector3f(obj.x, obj.y, obj.z);
}
I'm working on a 2D game using Direct3D 10's ID3DX10Sprite interface. It works pretty well, except that the textures are filtered with a linear filter (I think?), which makes them look pretty ugly when you scale them.
Original texture (32 x 32):
What it looks like scaled up in-game:
What I want it to look like:
So my question is: Is there a way to use Nearest Neighbour filtering (aka Point filtering) for the sprites, and how do you do that?
This is my code:
Initialization:
float width = 818.0F;
float height = 646.0F;
IDXGISwapChain* swapChain;
ID3D10Device* device = Direct3D_CreateDevice(hWnd, swapChain, (int)width, (int)height);
ID3D10RenderTargetView* rtv = Direct3D_CreateRenderTargetView(device, swapChain);
ID3DX10Sprite* mainSprite = Direct3D_CreateMainSpriteObject(device);
ID3D10ShaderResourceView* texture = Direct3D_CreateTexture(device, "C:\\Users\\Vincent\\Documents\\visual studio 2010\\Projects\\DirectX Test C++\\Debug\\base_grass.png", 32, 32);
D3DX10_SPRITE* sprite = Direct3D_CreateSprite(texture, 0.0F, 0.0F, 1.0F, 1.0F); //800.0F / 64.0F, 600.0F / 64.0F);
Direct3D_CreateViewport(device, 0, 0, (UINT)width, (UINT)height);
Rendering:
FLOAT* backColor = new FLOAT[4];
backColor[0] = 0.0F;
backColor[1] = 0.5F;
backColor[2] = 0.0F;
backColor[3] = 1.0F;
device->ClearRenderTargetView(rtv, backColor);
device->Draw(3, 0);
Direct3D_DrawSpritesBuffered(mainSprite, sprite, 1);
swapChain->Present(0, 0);
Direct3D functions:
/////////////////////////////////////////////////
// Direct3D_CreateDevice //
/////////////////////////////////////////////////
ID3D10Device * __stdcall Direct3D_CreateDevice(HWND hWnd, IDXGISwapChain* &swapChain, int width, int height)
{
//Variables.
ID3D10Device* D3DDevice;
DXGI_SWAP_CHAIN_DESC swapChainDescription;
ZeroMemory(&swapChainDescription, sizeof(DXGI_SWAP_CHAIN_DESC));
//Buffer settings.
swapChainDescription.BufferCount = 1;
swapChainDescription.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
swapChainDescription.BufferDesc.Width = width;
swapChainDescription.BufferDesc.Height = height;
swapChainDescription.BufferDesc.RefreshRate.Numerator = 60;
swapChainDescription.BufferDesc.RefreshRate.Denominator = 1;
swapChainDescription.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
//Misc.
swapChainDescription.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH;
swapChainDescription.OutputWindow = hWnd;
swapChainDescription.SampleDesc.Count = 1;
swapChainDescription.SampleDesc.Quality = 0;
swapChainDescription.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
swapChainDescription.Windowed = TRUE;
//Try to create the device and SwapChain.
if (FAILED(D3D10CreateDeviceAndSwapChain(NULL,
D3D10_DRIVER_TYPE_HARDWARE,
NULL,
D3D10_CREATE_DEVICE_DEBUG,
D3D10_SDK_VERSION,
&swapChainDescription,
&swapChain,
&D3DDevice))) return NULL;
return D3DDevice;
}
/////////////////////////////////////////////////
// Direct3D_CreateRenderTargetView //
/////////////////////////////////////////////////
ID3D10RenderTargetView * __stdcall Direct3D_CreateRenderTargetView(ID3D10Device* device, IDXGISwapChain* swapChain)
{
//Variables.
HRESULT hRes = 0;
ID3D10Texture2D* backBuffer;
ID3D10RenderTargetView* renderTargetView;
//Get the back buffer.
hRes = swapChain->GetBuffer(0, __uuidof(ID3D10Texture2D), (LPVOID*)&backBuffer);
if(FAILED(hRes)) { return NULL; }
//Try to create the RenderTargetView.
hRes = device->CreateRenderTargetView(backBuffer, NULL, &renderTargetView);
if(FAILED(hRes)) { return NULL; }
//Release the back buffer
backBuffer->Release();
//Set the render target
device->OMSetRenderTargets(1, &renderTargetView, NULL);
return renderTargetView;
}
/////////////////////////////////////////////////
// Direct3D_CreateViewport //
/////////////////////////////////////////////////
void __stdcall Direct3D_CreateViewport(ID3D10Device* device, int x, int y, UINT width, UINT height)
{
D3D10_VIEWPORT* viewport = new D3D10_VIEWPORT();
viewport->TopLeftX = x;
viewport->TopLeftY = y;
viewport->Width = width;
viewport->Height = height;
viewport->MinDepth = 0.0F;
viewport->MaxDepth = 1.0F;
device->RSSetViewports(1, viewport);
}
/////////////////////////////////////////////////
// Direct3D_CreateMainSpriteObject //
/////////////////////////////////////////////////
ID3DX10Sprite * __stdcall Direct3D_CreateMainSpriteObject(ID3D10Device* device)
{
//Create the sprite object.
ID3DX10Sprite* s;
HRESULT hRes = D3DX10CreateSprite(device, 4096, &s);
if(FAILED(hRes)) { return NULL; }
//Construct the Projection- and ViewTransform matrix.
D3DXMATRIX matview;
matview._12 = 0.0F;
matview._13 = 0.0F;
matview._14 = 0.0F;
matview._21 = 0.0F;
matview._23 = 0.0F;
matview._24 = 0.0F;
matview._31 = 0.0F;
matview._32 = 0.0F;
matview._34 = 0.0F;
matview._41 = 0.0F;
matview._42 = 0.0F;
matview._43 = 0.0F;
matview._11 = 1.0F;
matview._22 = 1.0F;
matview._33 = 1.0F;
matview._44 = 1.0F;
//Set the Projection- and ViewTransforms.
s->SetProjectionTransform(&matview);
s->SetViewTransform(&matview);
return s;
}
/////////////////////////////////////////////////
// Direct3D_DrawSpritesBuffered //
/////////////////////////////////////////////////
void __stdcall Direct3D_DrawSpritesBuffered(ID3DX10Sprite* spriteObject, D3DX10_SPRITE* sprites, int count)
{
spriteObject->Begin(0);
spriteObject->DrawSpritesBuffered(sprites, count);
spriteObject->Flush();
spriteObject->End();
}
/////////////////////////////////////////////////
// Direct3D_CreateTexture //
/////////////////////////////////////////////////
ID3D10ShaderResourceView * __stdcall Direct3D_CreateTexture(ID3D10Device* device, LPCSTR file, int width, int height)
{
//Variables.
D3DX10_IMAGE_LOAD_INFO imgLoadInfo;
ID3D10ShaderResourceView * shaderResourceView;
ZeroMemory(&imgLoadInfo, sizeof(imgLoadInfo));
//Image load settings.
imgLoadInfo.BindFlags = D3D10_BIND_SHADER_RESOURCE;
imgLoadInfo.CpuAccessFlags = 0;
imgLoadInfo.Filter = D3DX10_FILTER_NONE;
imgLoadInfo.FirstMipLevel = 0;
imgLoadInfo.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
imgLoadInfo.MipFilter = D3DX10_FILTER_NONE;
imgLoadInfo.MipLevels = 1;
imgLoadInfo.MiscFlags = 0;
imgLoadInfo.Usage = D3D10_USAGE_DEFAULT;
//Get the source image's info.
imgLoadInfo.pSrcInfo = new D3DX10_IMAGE_INFO();
D3DX10GetImageInfoFromFileA(file, NULL, imgLoadInfo.pSrcInfo, NULL);
//Set the texture dimensions.
imgLoadInfo.Width = width;
imgLoadInfo.Height = height;
HRESULT hRes;
//Attempt to create the ShaderResourceView.
if(FAILED(D3DX10CreateShaderResourceViewFromFile(device, file, &imgLoadInfo, NULL, &shaderResourceView, &hRes)))
{
return NULL;
}
return shaderResourceView;
}
/////////////////////////////////////////////////
// Direct3D_CreateSprite //
/////////////////////////////////////////////////
D3DX10_SPRITE * __stdcall Direct3D_CreateSprite(ID3D10ShaderResourceView* texture, float textureX, float textureY, float textureWidth, float textureHeight)
{
//Variables.
D3DX10_SPRITE* sprite = new D3DX10_SPRITE();
//Color settings.
sprite->ColorModulate.r = 1.0f;
sprite->ColorModulate.g = 1.0f;
sprite->ColorModulate.b = 1.0f;
sprite->ColorModulate.a = 1.0f;
//Texture settings.
sprite->pTexture = texture;
sprite->TextureIndex = 0;
sprite->TexCoord.x = textureX;
sprite->TexCoord.y = textureY;
sprite->TexSize.x = textureWidth;
sprite->TexSize.y = textureHeight;
//Dimension and location matrix.
sprite->matWorld._12 = 0.0F;
sprite->matWorld._13 = 0.0F;
sprite->matWorld._14 = 0.0F;
sprite->matWorld._21 = 0.0F;
sprite->matWorld._23 = 0.0F;
sprite->matWorld._24 = 0.0F;
sprite->matWorld._31 = 0.0F;
sprite->matWorld._32 = 0.0F;
sprite->matWorld._34 = 0.0F;
sprite->matWorld._41 = 0.0F;
sprite->matWorld._42 = 0.0F;
sprite->matWorld._43 = 0.0F;
sprite->matWorld._11 = 1.0F;
sprite->matWorld._22 = 1.0F;
sprite->matWorld._33 = 1.0F;
sprite->matWorld._44 = 1.0F;
return sprite;
}
Legacy D3DX10_SPRITE only supports using a single sampler:
D3D10_SAMPLER_DESC splDesc;
ZeroMemory(&splDesc, sizeof(D3D10_SAMPLER_DESC));
splDesc.Filter = D3D10_FILTER_MIN_MAG_MIP_LINEAR;
splDesc.AddressU = D3D10_TEXTURE_ADDRESS_CLAMP;
splDesc.AddressV = D3D10_TEXTURE_ADDRESS_CLAMP;
splDesc.AddressW = D3D10_TEXTURE_ADDRESS_CLAMP;
splDesc.ComparisonFunc = D3D10_COMPARISON_NEVER;
splDesc.MaxLOD = FLT_MAX;
VH( m_pDevice->CreateSamplerState(&splDesc, &m_pSampler) );
It also does not provide any way to override this or supply custom state.
SpriteBatch in the DirectX Tool Kit for DirectX 11 does provide the ability to set which sampler state to use, and it provides hooks for custom state callbacks:
void Begin(SpriteSortMode sortMode = SpriteSortMode_Deferred,
ID3D11BlendState* blendState = nullptr,
ID3D11SamplerState* samplerState = nullptr,
ID3D11DepthStencilState* depthStencilState = nullptr,
ID3D11RasterizerState* rasterizerState = nullptr,
std::function<void __cdecl()> setCustomShaders = nullptr,
XMMATRIX transformMatrix = MatrixIdentity);
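For example, with the DirectX Tool Kit you can request point sampling via CommonStates. A minimal sketch, assuming device, context and textureSRV are your existing D3D11 device, immediate context and shader resource view:
#include <memory>
#include <SpriteBatch.h>
#include <CommonStates.h>

using namespace DirectX;

// Created once, after the device and context exist.
auto spriteBatch = std::make_unique<SpriteBatch>(context);
auto states = std::make_unique<CommonStates>(device);

// Per frame: PointClamp() gives nearest-neighbour (point) filtering.
spriteBatch->Begin(SpriteSortMode_Deferred, nullptr, states->PointClamp());
spriteBatch->Draw(textureSRV, XMFLOAT2(0.f, 0.f));
spriteBatch->End();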
The most sensible solution is to port from Direct3D 10 to Direct3D 11 and stop using legacy Direct3D 10.
If there's some particularly compelling reason why you have to stay on Direct3D 10, then you can take a look at SpriteBatch.h / SpriteBatch.cpp which you could copy out and back-port to Direct3D 10.
See MSDN, Where is the DirectX SDK (2015 Edition?), and Living without D3DX
I'm making a software renderer.
Sometimes a transformed vertex's w coordinate is 0.
So if I divide the other coordinates by w, it causes an error.
If the camera position's z coordinate equals the vertex position's z coordinate, the transformed vertex's w coordinate is 0.
Is there something wrong with my code?
The code below is my transform code.
void Camera::ComputeWorldToCameraMat()
{
Vector3 zaxis;
zaxis.Set( _dir.x - _pos.x, _dir.y - _pos.y, _dir.z - _pos.z );
zaxis.Normalize();
// xaxis = zaxis X up
Vector3 xaxis;
xaxis = cross(zaxis, _up);
xaxis.Normalize();
Vector3 yaxis;
yaxis = cross(zaxis, xaxis);
_worldToCamera.Identity();
_worldToCamera.a11 = xaxis.x;
_worldToCamera.a12 = yaxis.x;
_worldToCamera.a13 = zaxis.x;
_worldToCamera.a14 = 0;
_worldToCamera.a21 = xaxis.y;
_worldToCamera.a22 = yaxis.y;
_worldToCamera.a23 = zaxis.y;
_worldToCamera.a24 = 0;
_worldToCamera.a31 = xaxis.z;
_worldToCamera.a32 = yaxis.z;
_worldToCamera.a33 = zaxis.z;
_worldToCamera.a34 = 0;
_worldToCamera.a41 = -dot(xaxis, _pos);
_worldToCamera.a42 = -dot(yaxis, _pos);
_worldToCamera.a43 = -dot(zaxis, _pos);
_worldToCamera.a44 = 1;
}
void Camera::ComputeProjectionMat( Viewport* viewport)
{
int width = viewport->_width;
int height = viewport->_height;
//float tanfov = (float)tan(_fov * 0.5f);
float cot = 1.0f / tan( _fov * 0.5f );
float aspect = (float)(width / height);
_projection.Identity();
_projection.a11 = cot / aspect;
_projection.a12 = 0;
_projection.a13 = 0;
_projection.a14 = 0;
_projection.a21 = 0;
_projection.a22 = cot;
_projection.a23 = 0;
_projection.a24 = 0;
_projection.a31 = 0;
_projection.a32 = 0;
_projection.a33 = -1 * (_farZ + _nearZ) / ( _farZ - _nearZ );
_projection.a34 = 2 * ( _nearZ * _farZ ) / ( _farZ - _nearZ );
_projection.a41 = 0;
_projection.a42 = 0;
_projection.a43 = 1;
_projection.a44 = 0;
}
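For reference, here is how w works out with the projection matrix above, assuming column vectors (clip = _projection * cameraSpacePoint); with row vectors the value is just scaled by 2 * _nearZ * _farZ / (_farZ - _nearZ), so it is still zero whenever the camera-space z is zero:
// Illustration only (hypothetical helper): the w row of _projection is
// (a41, a42, a43, a44) = (0, 0, 1, 0), so for a camera-space point (x, y, z, 1):
float ClipW(float x, float y, float z)
{
    return 0.0f * x + 0.0f * y + 1.0f * z + 0.0f * 1.0f; // == z
}
// A vertex lying in the camera plane (camera-space z == 0) therefore gives w == 0,
// which is exactly the case described above where the divide fails.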