Batching in DirectX - c++

I have built a framework so far in DirectX 9. It uses a single vertex buffer which draws every texture I load, and I have used one vertex array which draws every single texture quad inside a Lock and Unlock call. I have heard there is a technique called batching where I can Lock once, write everything, and Unlock before drawing. I know I can write every texture quad in one Lock/Unlock call, but I have no idea how I can set a different texture for each of them. Can anyone here guide me?
// Fills the dynamic vertex buffer with a single pre-transformed (XYZRHW)
// textured quad at Position, sized to the texture, then draws it as a
// two-triangle fan. One Lock/Unlock + one DrawPrimitive per quad.
vertexBufferRHW->Lock(0, 0, (void**)&VerticesRHW, NULL);
// Vertex 0: top-left corner, UV (0,0).
VerticesRHW[0].Color = color;
VerticesRHW[0].X = (float) Position.X;
VerticesRHW[0].Y = (float) Position.Y;
VerticesRHW[0].Z = 0.0f;
VerticesRHW[0].RHW = 1.0f;
VerticesRHW[0].U = 0.0f;
VerticesRHW[0].V = 0.0f;
// Vertex 1: top-right corner, UV (1,0).
VerticesRHW[1].Color = color;
VerticesRHW[1].X = (float) (Position.X+TextureFile.Width);
VerticesRHW[1].Y = (float) Position.Y;
VerticesRHW[1].Z = 0.0f;
VerticesRHW[1].RHW = 1.0f;
VerticesRHW[1].U = 1.0f;
VerticesRHW[1].V = 0.0f;
// Vertex 2: bottom-right corner, UV (1,1).
VerticesRHW[2].Color = color;
VerticesRHW[2].X = (float) (Position.X+TextureFile.Width);
VerticesRHW[2].Y = (float) (Position.Y+TextureFile.Height);
VerticesRHW[2].Z = 0.0f;
VerticesRHW[2].RHW = 1.0f;
VerticesRHW[2].U = 1.0f;
VerticesRHW[2].V = 1.0f;
// Vertex 3: bottom-left corner, UV (0,1).
VerticesRHW[3].Color = color;
VerticesRHW[3].X = (float) Position.X ;
VerticesRHW[3].Y = (float) (Position.Y+TextureFile.Height);
VerticesRHW[3].Z = 0.0f;
VerticesRHW[3].RHW = 1.0f;
VerticesRHW[3].U = 0.0f;
VerticesRHW[3].V = 1.0f;
vertexBufferRHW->Unlock();
// Bind this quad's texture, then draw the 4 vertices as 2 triangles.
spriteDevice->SetTexture(0,TextureFile.TextureFile);
spriteDevice->DrawPrimitive(D3DPT_TRIANGLEFAN,0,2);

You can make all your quads a height/width of 1 and then do a for loop like so:
// Draw each texture by reusing one unit-sized quad, scaled/translated into
// place via the world transform — one SetTexture + DrawPrimitive per texture.
// FIX: D3DTS_WORLDMATRIX is a macro that requires an index argument
// (D3DTS_WORLDMATRIX(0)); the plain world-transform constant is D3DTS_WORLD.
// NOTE(review): SetTransform only affects untransformed (XYZ) vertices —
// pre-transformed XYZRHW vertices like the ones above bypass the pipeline's
// world matrix, so the shared quad must use plain XYZ vertices for this
// approach to work.
for( ... loop through all the textures )
{
// create a matrix that scales your quad to make it the correct width/height and in the correct location
D3DXMATRIX matrix, scale, translation;
D3DXMatrixScaling( &scale, texture.width, texture.height, 0 );
D3DXMatrixTranslation( &translation, position.x, position.y, 0 );
matrix = scale * translation;
spriteDevice->SetTransform( D3DTS_WORLD, &matrix );
spriteDevice->SetTexture(0,texture);
spriteDevice->DrawPrimitive(D3DPT_TRIANGLEFAN,0,2);
}

Related

OpenGL ES Drawing Shapes

I'm learning OpenGL ES to render for native Android development. I'm able to get a triangle to draw, but I cannot get more than one triangle to draw. I just want to draw a rectangle, but if I pass the glVertexPointer function more than 3 vertices then it does not draw anything. I tried using GL_TRIANGLES and GL_TRIANGLE_STRIP.
What am I doing wrong?
// A rectangle centred on the origin, stored as two CCW triangles
// (6 vertices x 3 components) for use with glDrawArrays(GL_TRIANGLES, ...).
struct Quad
{
GLfloat Vertices[18];
const GLbyte nNumVerts;
Quad(GLfloat i_fWidth, GLfloat i_fHeight) : nNumVerts(6)
{
const GLfloat halfW = i_fWidth / 2;
const GLfloat halfH = i_fHeight / 2;
// Corner table: triangle 1 = TL, BL, BR; triangle 2 = TL, BR, TR.
const GLfloat corners[6][3] = {
{ -halfW,  halfH, 0.0f },
{ -halfW, -halfH, 0.0f },
{  halfW, -halfH, 0.0f },
{ -halfW,  halfH, 0.0f },
{  halfW, -halfH, 0.0f },
{  halfW,  halfH, 0.0f },
};
// Flatten the table into the interleaved vertex array.
for (int v = 0; v < 6; ++v)
for (int c = 0; c < 3; ++c)
Vertices[v * 3 + c] = corners[v][c];
}
};
// Clears the colour buffer, submits the quad's vertex array, and presents
// the frame via EGL.
void Renderer::Render()
{
glClear(GL_COLOR_BUFFER_BIT);
glEnableClientState(GL_VERTEX_ARRAY);
// If I change "m_Quad.nNumVerts" to '3' instead of '6' it will draw a triangle
// Anything higher than '3' and it doesn't draw anything
// NOTE(review): this is the bug discussed below — glVertexPointer's first
// argument is the number of components PER vertex (3 for x,y,z), not the
// vertex count, so passing 6 here is invalid.
glVertexPointer(m_Quad.nNumVerts, GL_FLOAT, 0, m_Quad.Vertices);
glDrawArrays(GL_TRIANGLES, 0, m_Quad.nNumVerts);
eglSwapBuffers(m_pEngine->pDisplay, m_pEngine->pSurface);
}
The first argument of glVertexPointer is the number of values per vertex, so it should stay 3 in this case.
glVertexPointer(3, GL_FLOAT, 0, m_Quad.Vertices);

CDLOD issue with lighting while morphing triangles

I have an icosahedron, subdivided and with LOD. Now I am trying to add a dynamic material.
The problem is that I need the normals for that.
I use Unreal Engine 4; when I use the built-in function to calculate normals, I get strange artifacts in the area between different LOD levels.
So I made my own calculation with this code:
// Computes one face normal per triangle and writes it to each of the
// triangle's three vertices.
// NOTE(review): vertices shared by several triangles keep only the LAST
// face normal written, not an average of adjacent faces — a plausible
// contributor to shading seams between LOD levels.
TArray<FVector> normals;
normals.Init(FVector(-1.f, -1.f, -1.f), geoData.GeoData.Num());
int32 triangleCount = geoData.Triangles.Num() / 3;
for (int32 i = 0; i < triangleCount; i++)
{
// Index buffer positions of triangle i's three corners.
int32 normalTriangleIndex = i * 3;
int32 triangleIndexA = geoData.Triangles[normalTriangleIndex];
int32 triangleIndexB = geoData.Triangles[normalTriangleIndex + 1];
int32 triangleIndexC = geoData.Triangles[normalTriangleIndex + 2];
FVector pointA = geoData.GeoData[triangleIndexA];
FVector pointB = geoData.GeoData[triangleIndexB];
FVector pointC = geoData.GeoData[triangleIndexC];
// Face normal = cross product of two triangle edges.
FVector sideAB = pointB - pointA;
FVector sideBC = pointC - pointB;
FVector nNormal;
nNormal = FVector::CrossProduct(sideAB, sideBC);
// NOTE(review): this manual division is redundant — GetSafeNormal() on
// the next line normalises again (and guards against zero length).
nNormal = nNormal / nNormal.Size();
nNormal = nNormal.GetSafeNormal();
// Negated, presumably to match the mesh's winding order — TODO confirm.
normals[triangleIndexA] = -nNormal;
normals[triangleIndexB] = -nNormal;
normals[triangleIndexC] = -nNormal;
}
The calculation seems to be correct: when I skip the morphing calculation in my shader, those artifacts disappear...
Maybe the triangles are too close to each other while morphing, and this causes these artifacts?
This is how morphing looks like. (Source)
I solved it by skipping the first morph value.
Now morphing starts from the 2nd picture as you can see on the screenshot.
// Returns the morph factor used to blend geometry between two CDLOD levels,
// based on the camera distance 'dist' within LOD level 'lev'.
float morphFac(float dist, int lev)
{
// Distance band [low, high) covered by this LOD level.
float low = input_distanceLUT[lev - 1];
float high = input_distanceLUT[lev];
float delta = high - low;
// Normalised position of 'dist' inside the band (0 at low, 1 at high).
float a = (dist - low) / delta;
// Morphing happens over the first half of the band.
float morphRange = 0.5f;
// Lower clamp bound of 0.1f (previously 0.0f) skips the very first morph
// step, which is what removed the lighting artifacts between LOD levels.
return 1.0f - clamp(a / morphRange, 0.1f, 1.0f);
}
Just a tiny change in the shader. clamp(a / morphRange, 0.1f, 1.0f);
The 0.1f was previously 0.0f.

How to set particle color to black in cocos2d

I am trying to set my particle's color to be black like follows. My background color is gray, other colors like red shows up but the black doesn't. Isn't black RGB (0,0,0)? Thanks in advance.
// Configure fully opaque black particles: start and end colours are
// RGB(0,0,0) with alpha 1, and zero variance so every particle is identical.
startColor.r = 0.0f;
startColor.g = 0.0f;
startColor.b = 0.0f;
startColor.a = 1.0f;
startColorVar.r = 0.0f;
startColorVar.g = 0.0f;
startColorVar.b = 0.0f;
startColorVar.a = 0.0f;
endColor.r = 0.0f;
endColor.g = 0.0f;
endColor.b = 0.0f;
endColor.a = 1.0f;
endColorVar.r = 0.0f;
endColorVar.g = 0.0f;
endColorVar.b = 0.0f;
endColorVar.a = 0.0f;
// NOTE(review): with this additive-style blend function a black (0,0,0)
// source colour contributes nothing, so the particles are invisible —
// see the answer below ({GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA} fixes it).
self.blendFunc = (ccBlendFunc){GL_SRC_ALPHA, GL_DST_ALPHA};
Isn't black RGB (0,0,0)?
Precisely. cocos2d uses additive blending equation (glBlendEquation(GL_FUNC_ADD)), so with source color of RGB(0, 0, 0) and your blend function resulting color is equal to destination color (background).
cocos2d doesn't expose the blending equation. You can use {GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA} as the blend function, or subclass CCParticleSystem and specify a different blending equation in its draw method.

Projecting a 3D point to 2D screen coordinate OpenTK

Using Monotouch and OpenTK I am trying to get the screen coordinate of one 3D point. I have my world view projection matrix set up, and OpenGL makes sense of it and projects my 3D model perfectly, but how to use the same matrix to project just one point from 2D to 3D?
I thought I could simply use:
Vector3.Transform(ref input3Dpos, ref matWorldViewProjection, out projected2Dpos);
Then have the projected screen coordinate in projected2DPos. But the resulting Vector4 does not seem to represent the proper projected screen coordinate. And I do not know how to calculate it from there on.
I found I need to divide by Vector4.w, however I am still getting the wrong values. Using this method now:
// Emulates gluProject: maps a world-space point through the combined
// world-view-projection matrix into window coordinates.
// Returns false (with screenPos zeroed) when the point is on or behind
// the camera plane (w <= 0).
private static bool GluProject(OpenTK.Vector3 objPos, OpenTK.Matrix4 matWorldViewProjection, int[] viewport, out OpenTK.Vector3 screenPos)
{
// Promote to homogeneous coordinates (w = 1) so the projection's
// perspective terms land in _out.W.
OpenTK.Vector4 _in;
_in.X = objPos.X;
_in.Y = objPos.Y;
_in.Z = objPos.Z;
_in.W = 1f;
Vector4 _out = OpenTK.Vector4.Transform(_in, matWorldViewProjection);
if (_out.W <= 0.0)
{
screenPos = OpenTK.Vector3.Zero;
return false;
}
// Perspective divide: clip space -> NDC (each axis in [-1, 1]).
_out.X /= _out.W;
_out.Y /= _out.W;
_out.Z /= _out.W;
/* Map x, y and z to range 0-1 */
// Y is negated because window coordinates grow downwards.
_out.X = _out.X * 0.5f + 0.5f;
_out.Y = -_out.Y * 0.5f + 0.5f;
_out.Z = _out.Z * 0.5f + 0.5f;
/* Map x,y to viewport */
// viewport = {x, y, width, height}.
_out.X = _out.X * viewport[2] + viewport[0];
_out.Y = _out.Y * viewport[3] + viewport[1];
screenPos.X = _out.X;
screenPos.Y = _out.Y;
screenPos.Z = _out.Z;
return true;
}
I cannot see any errors though... :S
In the first question you're missing the last step: Mapping from NDC (Normalized Device Coordinates) to viewport coordinates. That's what the lines
/* Map x,y to viewport */
_out.X = _out.X * viewport[2] + viewport[0];
_out.Y = _out.Y * viewport[3] + viewport[1];
in your GluProject do.
You have two options. You can calculate it yourself, or use the glProject function. I prefer the first.
Number 1:
// Projects a world-space point to pixel coordinates using explicit
// view and projection matrices.
private Vector2 Convert(
Vector3 pos,
Matrix4 viewMatrix,
Matrix4 projectionMatrix,
int screenWidth,
int screenHeight)
{
// World -> eye -> clip space.
pos = Vector3.Transform(pos, viewMatrix);
pos = Vector3.Transform(pos, projectionMatrix);
// NOTE(review): Vector3.Transform discards the w component, so this
// divides by Z as a stand-in for the true perspective divide by
// clip-space w (as GluProject above does) — confirm this approximation
// is acceptable for your projection matrix.
pos.X /= pos.Z;
pos.Y /= pos.Z;
// NDC [-1, 1] -> window coordinates.
pos.X = (pos.X + 1) * screenWidth / 2;
pos.Y = (pos.Y + 1) * screenHeight / 2;
return new Vector2(pos.X, pos.Y);
}
Number 2:
// Projects a 3D point into 2D window coordinates by reading the current
// fixed-function modelview/projection matrices and viewport back from
// OpenGL and delegating to Glu.Project.
public Vector2 form3Dto2D(Vector3 our3DPoint)
{
Vector3 our2DPoint;
float[] modelviewMatrix = new float[16];
float[] projectionMatrix = new float[16];
int[] viewport = new int[4];
// Read the current transform state back from the GL driver.
GL.GetFloat(GetPName.ModelviewMatrix, modelviewMatrix);
GL.GetFloat(GetPName.ProjectionMatrix, projectionMatrix);
GL.GetInteger(GetPName.Viewport, viewport);
// Glu.Project expects double-precision matrices, hence the conversion.
OpenTK.Graphics.Glu.Project(our3DPoint, convertFloatsToDoubles(modelviewMatrix),
convertFloatsToDoubles(projectionMatrix), viewport, out our2DPoint);
// FIX: the original snippet was missing the trailing semicolon here,
// so it would not compile.
return new Vector2(our2DPoint.X, our2DPoint.Y);
}
// Widens a float[] to a double[] element by element.
// A null input maps to null, preserving the original contract.
public static double[] convertFloatsToDoubles(float[] input)
{
if (input == null)
{
return null; // Or throw an exception - your choice
}
double[] widened = new double[input.Length];
int index = 0;
foreach (float value in input)
{
widened[index++] = value;
}
return widened;
}

why D3DXCreateCylinder made mesh coloring/material is not working?

This question is a continuation of "why D3DXCreateCylinder is not creating a cylinder?". I am able to draw the cylinder, but it is only drawn as shown in the image below.
The code is as follows
void draw_Cylinder(void){
D3DXMATRIX rot_matrix;
D3DXMATRIX trans_matrix;
D3DXMATRIX world_matrix;
static float rot_triangle=0.0f;
static float rot_triangle2=0.0f;
D3DXMatrixRotationY(&rot_matrix,rot_triangle); //Rotate the cylinder
D3DXMatrixRotationX(&rot_matrix,rot_triangle2); //Rotate the cylinder
D3DXMatrixTranslation(&trans_matrix,2.0f,0,20.0f); //Shift it 2 units to the left
D3DXMatrixMultiply(&world_matrix,&rot_matrix,&trans_matrix);
D3DMATERIAL9 material;// = new D3DMATERIAL9();
ZeroMemory( &material, sizeof(D3DMATERIAL9) );
// Set the RGBA for diffuse reflection.
material.Diffuse.r = 0.5f;
material.Diffuse.g = 0.0f;
material.Diffuse.b = 0.5f;
material.Diffuse.a = 1.0f;
// Set the RGBA for ambient reflection.
material.Ambient.r = 0.5f;
material.Ambient.g = 0.0f;
material.Ambient.b = 0.5f;
material.Ambient.a = 1.0f;
// Set the color and sharpness of specular highlights.
material.Specular.r = 1.0f;
material.Specular.g = 1.0f;
material.Specular.b = 1.0f;
material.Specular.a = 1.0f;
material.Power = 2.0f;
// Set the RGBA for emissive color.
material.Emissive.r = 0.0f;
material.Emissive.g = 0.0f;
material.Emissive.b = 0.0f;
material.Emissive.a = 0.0f;
g_d3d_device->SetMaterial(&material);
g_d3d_device->SetTexture(0,NULL);
g_d3d_device->SetTransform(D3DTS_WORLD,&world_matrix);
m_ppMeshCylinder->DrawSubset(0);
////Render from our Vertex Buffer
//g_d3d_device->DrawPrimitive(D3DPT_TRIANGLELIST, //PrimitiveType
// 0, //StartVertex
// g_pyramid_count); //PrimitiveCount
rot_triangle+=0.0007f;
if(rot_triangle > D3DX_PI*2)
{
rot_triangle-=D3DX_PI*2;
}
rot_triangle2+=0.0007f;
if(rot_triangle2 > D3DX_PI*2)
{
rot_triangle2-=D3DX_PI*2;
}
}
or download the project.
I have attached my code here: "project code".
I want to draw it with the 3D shading that any 3D mesh generally has when rendered by default.
I am not very familiar with materials. Or is it a problem with the graphics card? (Just a thought.)
In addition, where can I find information and samples about SetRenderState?
Try
g_d3d_device->SetRenderState( D3DRS_DIFFUSEMATERIALSOURCE, D3DMCS_MATERIAL );
At the moment it is defaulting to using "Color 1" which is the first of the 2 possible vertex colours.