PhysX - simulate() never ends if GPU is used - C++

I'm still working on a physics system for simulating fluids. I rewrote my application to use PhysX 3.3.0 and to be more object-oriented, and now I have a problem which I haven't been able to resolve for a week or two.
This is my initialization of the PhysX context:
void PhysXSPH::initContext(void){
    static LogPxErrorCallback gLogPxErrorCallback;
    static PxDefaultAllocator gDefaultAllocatorCallback;
    mFoundation = PxCreateFoundation(PX_PHYSICS_VERSION, gDefaultAllocatorCallback, gLogPxErrorCallback);
    check(mFoundation, "PxFoundation creating failed!");

    static PxProfileZoneManager *mProfileZoneManager = &PxProfileZoneManager::createProfileZoneManager(mFoundation);
    check(mProfileZoneManager, "PxProfileZoneManager creation failed!");

    bool recordMemoryAllocations = true;
    mPhysics = PxCreateBasePhysics(PX_PHYSICS_VERSION, *mFoundation,
        PxTolerancesScale(), recordMemoryAllocations, mProfileZoneManager);
    check(mPhysics, "PxPhysics creating failed!");

    PxRegisterParticles(*mPhysics);
    if(!PxInitExtensions(*mPhysics)){
        check(NULL, "PxInitExtensions failed!");
    }

    static PxSimulationFilterShader gDefaultFilterShader = PxDefaultSimulationFilterShader;
    PxSceneDesc sceneDesc(mPhysics->getTolerancesScale());
    sceneDesc.gravity = PxVec3(0.0f, -9.81f, 0.0f);
    if(!sceneDesc.cpuDispatcher){
        mCpuDispatcher = PxDefaultCpuDispatcherCreate(4);
        check(mCpuDispatcher, "PxDefaultCpuDispatcherCreate failed!");
        sceneDesc.cpuDispatcher = mCpuDispatcher;
    }
    if(!sceneDesc.filterShader){
        sceneDesc.filterShader = gDefaultFilterShader;
    }

#ifdef PX_WINDOWS
    PxCudaContextManagerDesc cudaContextManagerDesc;
    mCudaContextManager = PxCreateCudaContextManager(*mFoundation, cudaContextManagerDesc, mProfileZoneManager);
    if( mCudaContextManager ){
        if( !mCudaContextManager->contextIsValid() ){
            mCudaContextManager->release();
            mCudaContextManager = NULL;
            CLOG(ERROR, "physX")<<"Invalid CUDA context.";
            exit(EXIT_FAILURE);
        }
        if(!sceneDesc.gpuDispatcher){
            sceneDesc.gpuDispatcher = mCudaContextManager->getGpuDispatcher();
        }
        CLOG(INFO, "physX")<<"CUDA context created.";
    } else {
        CLOG(ERROR, "physX")<<"Creating CUDA context manager failed.";
        exit(EXIT_FAILURE);
    }
#endif

    mScene = mPhysics->createScene(sceneDesc);
    check(mScene, "createScene failed!");
    createScene(mScene);
}
and the initialization of the PhysX scene, but the problem occurs even with an empty scene:
void PhysXSPH::createScene(PxScene *mScene){
    mScene->setVisualizationParameter(PxVisualizationParameter::eSCALE, 1.0);
    mScene->setVisualizationParameter(PxVisualizationParameter::eCOLLISION_SHAPES, 1.0f);
    createPlanes(mScene);
    createParticles(mScene);
    CLOG(INFO, "physX") << "PhysX scene created.";
}

void PhysXSPH::createPlanes(PxScene *mScene){
    PxMaterial* mMaterial = mPhysics->createMaterial(0.5,0.5,0.5);

    //Create actors
    //1) Create ground plane
    PxReal d = 0.0f;
    PxTransform pose = PxTransform(PxVec3(0.0f, 0, 0.0f), PxQuat(PxHalfPi, PxVec3(0.0f, 0.0f, 1.0f)));
    PxRigidStatic* plane = mPhysics->createRigidStatic(pose);
    check(plane, "Creating plane failed!");

    //create 4 more planes for aquarium
    PxRigidStatic* plane2 = PxCreatePlane(*mPhysics, PxPlane(PxVec3(-4.0f, 0.0, 0.0), PxVec3(1.0, 0.0, 0.0)), *mMaterial);
    PxRigidStatic* plane3 = PxCreatePlane(*mPhysics, PxPlane(PxVec3(4.0f, 0.0, 0.0), PxVec3(-1.0, 0.0, 0.0)), *mMaterial);
    PxRigidStatic* plane4 = PxCreatePlane(*mPhysics, PxPlane(PxVec3(0.0f, 0.0, -4.0f), PxVec3(0.0, 0.0, 1.0)), *mMaterial);
    PxRigidStatic* plane5 = PxCreatePlane(*mPhysics, PxPlane(PxVec3(0.0f, 0.0, 4.0f), PxVec3(0.0, 0.0, -1.0)), *mMaterial);

    // create shape
    PxShape* shape = plane->createShape(PxPlaneGeometry(), *mMaterial);
    check(shape, "Creating shape failed!");
    mScene->addActor(*plane);

    PxShape* shape2 = plane2->createShape(PxPlaneGeometry(), *mMaterial);
    check(shape2, "Creating shape failed!");
    mScene->addActor(*plane2);

    PxShape* shape3 = plane3->createShape(PxPlaneGeometry(), *mMaterial);
    check(shape3, "Creating shape failed!");
    mScene->addActor(*plane3);

    PxShape* shape4 = plane4->createShape(PxPlaneGeometry(), *mMaterial);
    check(shape4, "Creating shape failed!");
    mScene->addActor(*plane4);

    PxShape* shape5 = plane5->createShape(PxPlaneGeometry(), *mMaterial);
    check(shape5, "Creating shape failed!");
    mScene->addActor(*plane5);
}
void PhysXSPH::createParticles(PxScene *mScene){
    // set immutable properties.
    bool perParticleRestOffset = false;

    //get data from scene model
    int maxParticles = scene->getMaxParticles();
    int xDim = scene->getXDim();
    int yDim = scene->getYDim();
    int zDim = scene->getZDim();

    // create particle system in PhysX SDK
    particleSystem = mPhysics->createParticleFluid(maxParticles, perParticleRestOffset);
    check(particleSystem, "Creating particle system failed!");
    particleSystem->setRestOffset(particleRadius);
    particleSystem->setRestParticleDistance(particleRadius);
    particleSystem->setParticleBaseFlag(PxParticleBaseFlag::eGPU, true);
    // TODO set fluid parameters

    // add particle system to scene, in case creation was successful
    if (particleSystem)
        mScene->addActor(*particleSystem);

    indexes = new PxU32[maxParticles];
    particle_positions = new PxVec3[maxParticles];
    int index = 0;
    for(int x = 0; x < xDim; x++){
        for(int y = 0; y < yDim; y++){
            for(int z = 0; z < zDim; z++){
                indexes[index] = (PxU32)index;
                int v = 3 * index;
                particle_positions[index] = PxVec3((physx::PxReal)(scene->m_vPos[v]), (physx::PxReal)(scene->m_vPos[v+1]), (physx::PxReal)(scene->m_vPos[v+2]));
                //CLOG(INFO, "physX")<<index<<"["<<particle_positions[index].x<<"; "<<particle_positions[index].y<<"; "<<particle_positions[index].z<<"]";
                index++;
            }
        }
    }

    PxParticleCreationData particleCreationData;
    particleCreationData.numParticles = maxParticles;
    particleCreationData.indexBuffer = PxStrideIterator<const PxU32>(indexes);
    particleCreationData.positionBuffer = PxStrideIterator<const PxVec3>(particle_positions);

    // create particles in *PxParticleSystem* ps
    bool success = particleSystem->createParticles(particleCreationData);
    if(!success){
        CLOG(ERROR, "physX")<<"Creating particles failed.";
        exit(EXIT_FAILURE);
    }
}
If the code inside the #ifdef PX_WINDOWS block is commented out, everything works fine and the fluid flows as it should. But when I try to use my GPU, the application freezes on the first fetchResults() invocation (the simulate() method never finishes its job). There is no error log; it just freezes. It happens regardless of whether it is a DEBUG or RELEASE build, 32-bit or 64-bit.
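For completeness, the stepping code is the usual simulate/fetchResults pair (a minimal sketch, not my exact update loop; the fixed 1/60 timestep is just an example):
mScene->simulate(1.0f / 60.0f);   // advance the scene by one timestep
mScene->fetchResults(true);       // block until simulate() is done; with the GPU dispatcher active this never returns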
I have a GeForce 560 Ti and I use PhysX SDK 3.3.0. On a win64 debug build, for example, I link in Visual Studio against:
opengl32.lib glew32.lib glfw3.lib PhysX3DEBUG_x64.lib
PhysX3CommonDEBUG_x64.lib PxTaskDEBUG.lib PhysX3ExtensionsDEBUG.lib
PhysXProfileSDKDEBUG.lib
using:
nvToolsExt64_1.dll PhysX3CHECKED_x64.dll PhysX3CommonCHECKED_x64.dll
PhysX3GpuCHECKED_x64.dll PhysX3GpuDEBUG_x64.dll
I tried using different versions of the .libs and adding the .dlls the application asked for, but every combination ended up freezing on fetchResults().
I have no idea where to look for mistakes. Everything looks fine. I'd be thankful for any help!

I know this is an old thread but I had the exact same problem when I switched from 3.2.5 to 3.3.0.
I did find a solution. The problem is that you initialize the extensions twice here. You are using PxCreateBasePhysics to create your SDK object, which does some extra work in the background; namely, it calls PxInitExtensions, if I'm not mistaken.
The fix is simply to change the PxCreateBasePhysics call to the standard PxCreatePhysics call with the exact same arguments. That one doesn't do any additional setup behind the scenes. Just leaving out the PxInitExtensions call might work as well, but I have only tried the first approach.
It is odd that this only causes a freeze when the GPU is in use; maybe NVIDIA should look into it.
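In code, the change amounts to this one call in initContext (a sketch based on the code in the question; every argument stays the same):
// PxCreateBasePhysics reportedly sets up the extensions internally, so the explicit
// PxInitExtensions(*mPhysics) call later runs them a second time. The plain factory
// function does no extra setup and avoids the double initialization:
mPhysics = PxCreatePhysics(PX_PHYSICS_VERSION, *mFoundation,
    PxTolerancesScale(), recordMemoryAllocations, mProfileZoneManager);
check(mPhysics, "PxPhysics creating failed!");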

Related

How could I create a rigid body from a triangle mesh in PhysX?

I have recently been working on a project that involves the PhysX library. I need to create a PxRigidDynamic actor from a triangle mesh. I saw the example that the documentation provided, but I have some issues with this method.
The issues that I had were mainly:
The object wasn't affected by gravity, and would just sit there and vibrate (although I have a feeling I did something else wrong).
From what I have read, to create a rigid body from a triangle mesh, the object needs to be kinematic. This then throws an error when I try to change the object's velocity (see the sketch right after this list).
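To illustrate the second point, this is roughly the call that fails once the body is kinematic, and the kinematic-target alternative I have read about (a sketch, not my exact code; the target pose is just an example value):
// With PhysX::PxRigidBodyFlag::eKINEMATIC set, velocity changes are rejected at runtime:
dynamicObject->setLinearVelocity(PhysX::PxVec3(0.0f, 0.0f, 5.0f));
// A kinematic actor is meant to be driven by a target pose each simulation step instead:
dynamicObject->setKinematicTarget(PhysX::PxTransform(PhysX::PxVec3(0.0f, 0.0f, 0.1f)));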
Here is how I am currently creating the triangle mesh:
PhysX::PxTriangleMeshDesc meshDesc;
meshDesc.points.count = numVertices;
meshDesc.points.stride = sizeof(float) * 3;
meshDesc.points.data = vertices;

meshDesc.triangles.count = numIndices;
meshDesc.triangles.stride = 3 * sizeof(uint32_t);
meshDesc.triangles.data = indices;

PhysX::PxDefaultMemoryOutputStream writeBuffer;
PhysX::PxTriangleMeshCookingResult::Enum result;
bool status = cooking->cookTriangleMesh(meshDesc, writeBuffer, &result);
if (!status)
    return false;

PhysX::PxDefaultMemoryInputData readBuffer(writeBuffer.getData(), writeBuffer.getSize());
PhysX::PxTriangleMesh* triangleMesh = physics->createTriangleMesh(readBuffer);

dynamicObject = physics->createRigidDynamic(PhysX::PxTransform(PhysX::PxVec3(0.0f)));
if (!dynamicObject)
    return false;

dynamicObject->setMass(10.0f);
dynamicObject->setRigidBodyFlag(PhysX::PxRigidBodyFlag::eKINEMATIC, true);

material = physics->createMaterial(0.5f, 0.5f, 0.6f);
if (!material)
    return false;

shape = PhysX::PxRigidActorExt::createExclusiveShape(
    *dynamicObject,
    PhysX::PxTriangleMeshGeometry(triangleMesh,
        PhysX::PxMeshScale(PhysX::PxVec3(scale.x, scale.y, scale.z))),
    *material);
if (!shape)
    return false;
How can I solve these issues?
If this isn't possible, I would like to know how to convert a triangle mesh into a convex mesh.
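In case the convex-mesh route turns out to be the answer, this is roughly what I would try, pieced together from the cooking API (a sketch I have not verified; it reuses the variables from the code above):
// Cook a convex hull from the same vertex data (the index buffer is not needed;
// eCOMPUTE_CONVEX lets the cooker build the hull itself).
PhysX::PxConvexMeshDesc convexDesc;
convexDesc.points.count = numVertices;
convexDesc.points.stride = sizeof(float) * 3;
convexDesc.points.data = vertices;
convexDesc.flags = PhysX::PxConvexFlag::eCOMPUTE_CONVEX;

PhysX::PxDefaultMemoryOutputStream convexBuffer;
if (!cooking->cookConvexMesh(convexDesc, convexBuffer))
    return false;

PhysX::PxDefaultMemoryInputData convexInput(convexBuffer.getData(), convexBuffer.getSize());
PhysX::PxConvexMesh* convexMesh = physics->createConvexMesh(convexInput);

// A convex shape can live on a regular (non-kinematic) dynamic actor,
// so gravity and velocity changes should behave normally.
shape = PhysX::PxRigidActorExt::createExclusiveShape(
    *dynamicObject,
    PhysX::PxConvexMeshGeometry(convexMesh,
        PhysX::PxMeshScale(PhysX::PxVec3(scale.x, scale.y, scale.z))),
    *material);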

Unreal Engine 4 C++ Change StaticMeshComponent of AStaticMeshActor

I am trying to change every Static Mesh Actor's Material property. I know I need to iterate over each Actor and find its Static Mesh Component. However, I cannot modify the Material properly.
Here is my code:
GEngine->AddOnScreenDebugMessage(-1, 5.f, FColor::Red, TEXT("START Modeling()"));

// Find Actor and change Material
UWorld* world = GetWorld();

// Material Path
FString matPath = "Material'/Game/StarterContent/Materials/M_Metal_Gold.M_Metal_Gold'";
// Material Instance
UMaterialInstanceConstant* material = Cast<UMaterialInstanceConstant>(StaticLoadObject(UMaterialInstanceConstant::StaticClass(), nullptr, *(matPath)));

// Iterate over every Static Mesh Actor
for (TActorIterator<AStaticMeshActor> ActorItr(world); ActorItr; ++ActorItr)
{
    AStaticMeshActor *Mesh = *ActorItr;

    // Just for debugging purposes
    GEngine->AddOnScreenDebugMessage(-1, 5.f, FColor::Red, FString::Printf(TEXT("Actor: %s"), *(ActorItr->GetName())));
    GEngine->AddOnScreenDebugMessage(-1, 5.f, FColor::Red, FString::Printf(TEXT("Location: %s"), *(ActorItr->GetActorLocation().ToString())));

    // Get the Static Mesh Components
    TArray<UStaticMeshComponent*> MaterialComps;
    Mesh->GetComponents(MaterialComps);

    // I got this code from a community answer. I do not know how it works.
    for (int32 Index = 0; Index != MaterialComps.Num(); ++Index)
    {
        UStaticMeshComponent* targetComp = MaterialComps[Index];
        int32 mCnt = targetComp->GetNumMaterials();
        for (int i = 0; i < mCnt; i++)
            // This is the core code which actually changes the material.
            targetComp->SetMaterial(0, material);
    }
}
(Before and after screenshots were attached here.)
I expected the change to apply to every actor, since I am iterating over all of them. However, it only changes the floor actor's material.
You are only changing the material at index 0.
Change targetComp->SetMaterial(0, material); to targetComp->SetMaterial(i, material);. It should fix your problem.
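With that change, the inner loop from your code becomes (same loop as above, only the index argument differs):
for (int i = 0; i < mCnt; i++)
    // use the loop index so every material slot on the component gets the new material
    targetComp->SetMaterial(i, material);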

GamePlay3D engine won't show a model imported from FBX

I am a newbie with gameplay3d and went through all the tutorials; however, I can't manage to display this simple model (few polygons, one material) that I encoded from FBX. I checked the model with Unity3D and with a closed-source application that uses gameplay3d, and everything seems fine. I guess I am missing some detail when loading the scene.
This is the model file, including the original FBX file. I suspect it has something to do with the lighting:
https://www.dropbox.com/sh/ohgpsfnkm3iv24s/AACApRcxwtbmpKu4_5nnp8rZa?dl=0
This is the class that loads the scene.
#include "Demo.h"
// Declare our game instance
Demo game;
Demo::Demo()
: _scene(NULL), _wireframe(false)
{
}
void Demo::initialize()
{
// Load game scene from file
Bundle* bundle = Bundle::create("KGN56AI30N.gpb");
_scene = bundle->loadScene();
SAFE_RELEASE(bundle);
// Get the box model and initialize its material parameter values and bindings
Camera* camera = Camera::createPerspective(45.0f, getAspectRatio(), 1.0f, 20.0f);
Node* cameraNode = _scene->addNode("camera");
// Attach the camera to a node. This determines the position of the camera.
cameraNode->setCamera(camera);
// Make this the active camera of the scene.
_scene->setActiveCamera(camera);
SAFE_RELEASE(camera);
// Move the camera to look at the origin.
cameraNode->translate(0,0, 10);
cameraNode->rotateX(MATH_DEG_TO_RAD(0.25f));
// Update the aspect ratio for our scene's camera to match the current device resolution
_scene->getActiveCamera()->setAspectRatio(getAspectRatio());
// Set the aspect ratio for the scene's camera to match the current resolution
_scene->getActiveCamera()->setAspectRatio(getAspectRatio());
Light* directionalLight = Light::createDirectional(Vector3::one());
_directionalLightNode = Node::create("directionalLight");
_directionalLightNode->setLight(directionalLight);
SAFE_RELEASE(directionalLight);
_scene->addNode(_directionalLightNode);
_scene->setAmbientColor(1.0, 1.0, 1.0);
_scene->visit(this, &Demo::initializeMaterials);
}
bool Demo::initializeMaterials(Node* node)
{
    Model* model = dynamic_cast<Model*>(node->getDrawable());
    if (model)
    {
        for (int i = 0; i < model->getMeshPartCount(); i++)
        {
            Material* material = model->getMaterial(i);
            if (material)
            {
                // For this sample we will only bind a single light to each object in the scene.
                MaterialParameter* colorParam = material->getParameter("u_directionalLightColor[0]");
                colorParam->setValue(Vector3(0.75f, 0.75f, 0.75f));
                MaterialParameter* directionParam = material->getParameter("u_directionalLightDirection[0]");
                directionParam->setValue(Vector3(1, 1, 1));
            }
        }
    }
    return true;
}
void Demo::finalize()
{
    SAFE_RELEASE(_scene);
}

void Demo::update(float elapsedTime)
{
    // Rotate model
    //_scene->findNode("box")->rotateY(MATH_DEG_TO_RAD((float)elapsedTime / 1000.0f * 180.0f));
}

void Demo::render(float elapsedTime)
{
    // Clear the color and depth buffers
    clear(CLEAR_COLOR_DEPTH, Vector4::zero(), 1.0f, 0);

    // Visit all the nodes in the scene for drawing
    _scene->visit(this, &Demo::drawScene);
}

bool Demo::drawScene(Node* node)
{
    // If the node visited contains a drawable object, draw it
    Drawable* drawable = node->getDrawable();
    if (drawable)
        drawable->draw(_wireframe);
    return true;
}

void Demo::keyEvent(Keyboard::KeyEvent evt, int key)
{
    if (evt == Keyboard::KEY_PRESS)
    {
        switch (key)
        {
        case Keyboard::KEY_ESCAPE:
            exit();
            break;
        }
    }
}

void Demo::touchEvent(Touch::TouchEvent evt, int x, int y, unsigned int contactIndex)
{
    switch (evt)
    {
    case Touch::TOUCH_PRESS:
        _wireframe = !_wireframe;
        break;
    case Touch::TOUCH_RELEASE:
        break;
    case Touch::TOUCH_MOVE:
        break;
    };
}
I can't download your dropbox .fbx file. How many models do you have in the scene? Here's a simple way of doing what you want to do -- not optimal, but it'll get you started...
So first off, I can't see where in your code you actually assign a Shader to be used with the material. I use something like this:
material = model->setMaterial("Shaders/Animation/ADSVertexViewAnim.vsh", "Shaders/Animation/ADSVertexViewAnim.fsh");
You need to assign a Shader, and the above code will take the vertex and fragment shaders and use that when the object needs to be drawn.
I went about it a slightly different way: instead of loading the scene file automatically, I create an empty scene and then extract my model from the bundle and add it to the scene manually. That way, I can see exactly what is happening and I'm in control of each step. GamePlay3D has some fancy property files, but use them only once you know how the process works manually.
Initially, I created a simple cube, then built the scene manually and added the monkey to the node graph, as follows:
void GameMain::ExtractFromBundle()
{
    /// Create a new empty scene.
    _scene = Scene::create();

    // Create the Model and its Node
    Bundle* bundle = Bundle::create("res/monkey.gpb"); // Create the bundle from the GPB file

    /// Create the Cube
    {
        Mesh* meshMonkey = bundle->loadMesh("Character_Mesh"); // Load the mesh from the bundle
        Model* modelMonkey = Model::create(meshMonkey);
        Node* nodeMonkey = _scene->addNode("Monkey");
        nodeMonkey->setTranslation(0, 0, 0);
        nodeMonkey->setDrawable(modelMonkey);
    }
}
Then I want to search the scene graph and only assign a material to the object that I want to draw (the monkey). Use this if you want to assign different materials to different objects manually...
bool GameMain::initializeScene(Node* node)
{
    Material* material;
    std::cout << node->getId() << std::endl;

    // find the node in the scene
    if (strcmp(node->getId(), "Monkey") != 0)
        return false;

    Model* model = dynamic_cast<Model*>(node->getDrawable());
    if (!model)
        return false;

    material = model->setMaterial("Shaders/Animation/ADSVertexViewAnim.vsh", "Shaders/Animation/ADSVertexViewAnim.fsh");
    material->getStateBlock()->setCullFace(true);
    material->getStateBlock()->setDepthTest(true);
    material->getStateBlock()->setDepthWrite(true);

    // The World-View-Projection matrix is needed to view the 3D world through the camera
    material->setParameterAutoBinding("u_worldViewProjectionMatrix", "WORLD_VIEW_PROJECTION_MATRIX");
    // This matrix is necessary to calculate normals properly, but the WORLD_MATRIX would also work
    material->setParameterAutoBinding("u_worldViewMatrix", "WORLD_VIEW_MATRIX");
    material->setParameterAutoBinding("u_viewMatrix", "VIEW_MATRIX");

    return true;
}
Now the object is ready to be drawn, so I use these functions:
void GameMain::render(float elapsedTime)
{
    // Clear the color and depth buffers
    clear(CLEAR_COLOR_DEPTH, Vector4(0.0, 0.0, 0.0, 0.0), 1.0f, 0);

    // Visit all the nodes in the scene for drawing
    _scene->visit(this, &GameMain::drawScene);
}

bool GameMain::drawScene(Node* node)
{
    // If the node visited contains a drawable object, draw it
    Drawable* drawable = node->getDrawable();
    if (drawable)
        drawable->draw(_wireframe);
    return true;
}
I use my own shaders, so I don't have to worry about Light and DirectionalLight and all that stuff. Once I can see the object, then I'll add dynamic lights, etc, but for starters, start simple.
Regards.

In OpenTK, multiple meshes are not transforming when imported using Assimp

I'm sure there is an answer to this on the web but I can't find it.
I'm importing a scene from Blender that has multiple meshes, into OpenTK.
The library I'm using to import is Assimp-net, and the file format is Collada (.dae).
I have created a spaceship with multiple parts, each part being a mesh.
Now when I import and draw, the geometry of the objects looks fine and materials work as expected. However, the different parts are not rotated, scaled, or translated as they appear in Blender. What happens is the different parts are not connected, and some appear larger/smaller than they should, in the wrong place etc.
Is there a setting I'm missing when I export from Blender, or is there some Assimp member/function I can use to transform the meshes before I render them?
Importing the file:
string filename = @"C:\Path\ship.dae";
Scene ship;
//Create a new importer
AssimpImporter importer = new AssimpImporter();
//This is how we add a configuration (each config is its own class)
NormalSmoothingAngleConfig config = new NormalSmoothingAngleConfig(66.0f);
importer.SetConfig(config);
//Import the model
ship = importer.ImportFile(filename, PostProcessPreset.TargetRealTimeMaximumQuality);
//End of example
importer.Dispose();
Drawing the meshes (the entire "RenderFrame" event handler in OpenTK):
// Clear color/depth buffers
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);

// Define world space
GL.MatrixMode(MatrixMode.Projection);
GL.LoadIdentity();
GL.Ortho(-15.0, 15.0, -15.0, 15.0, 15.0, -15.0);

// Rotate around X and Y axes for better viewing
rotateX(xrot);
rotateY(yrot);

GL.Enable(EnableCap.ColorMaterial);

var rootnode = wes10.RootNode;
foreach (Node node in rootnode.Children)
{
    //for each node, do
    GL.MatrixMode(MatrixMode.Modelview); //ensure your current matrix is the model matrix.
    GL.PushMatrix(); //save current model matrix so you can undo next transformations;

    var meshIndices = node.MeshIndices;
    if (meshIndices == null)
        continue;
    else
    {
        Matrix4d convertedTransform = new Matrix4d();
        getConvertedMatrix(node.Transform, ref convertedTransform);
        GL.MultMatrix(ref convertedTransform);

        GL.Begin(BeginMode.Triangles);
        foreach (uint i in meshIndices)
        {
            Mesh mesh = wes10.Meshes[i];
            Material mat = wes10.Materials[mesh.MaterialIndex];

            // Material setup
            var spec_color = mat.ColorSpecular;
            var amb_color = mat.ColorAmbient;
            var diff_color = mat.ColorDiffuse;

            float[] mat_specular = { spec_color.R, spec_color.G, spec_color.B, spec_color.A };
            float[] mat_ambient = { amb_color.R, amb_color.G, amb_color.B, amb_color.A };
            float[] mat_diffuse = { diff_color.R, diff_color.G, diff_color.B, diff_color.A };
            float[] mat_shininess = { 0.0f };

            GL.Material(MaterialFace.FrontAndBack, MaterialParameter.Specular, mat_specular);
            GL.Material(MaterialFace.FrontAndBack, MaterialParameter.Ambient, mat_ambient);
            GL.Material(MaterialFace.FrontAndBack, MaterialParameter.Diffuse, mat_diffuse);
            GL.Material(MaterialFace.FrontAndBack, MaterialParameter.Shininess, mat_shininess);

            foreach (Face face in mesh.Faces)
            {
                foreach (uint indice in face.Indices)
                {
                    var normal = mesh.Normals[indice];
                    var pos = mesh.Vertices[indice];
                    //var tex = mesh.GetTextureCoords(0)[v];

                    //GL.TexCoord2(tex.X, tex.Y);
                    GL.Normal3(normal.X, normal.Y, normal.Z);
                    GL.Vertex3(pos.X, pos.Y, pos.Z);
                }
            }
        }
    }
    GL.PopMatrix();
}
GL.End();

game.SwapBuffers();
Updated to use suggestions.
In the C example, there is a transformation matrix per node:
aiMultiplyMatrix4(trafo,&nd->mTransformation);
Check the Assimp documentation on the data structure and the scene graph.
If you don't know what to do with that matrix, read up on the matrix stack. (Be aware that modern OpenGL recommends implementing your own transformation matrices.)
Globally, you need the following steps for rendering (read the C example for details):
//for each node, do
glMatrixMode(GL_MODELVIEW);    // ensure your current matrix is the model matrix
glPushMatrix();                // save the current model matrix so you can undo the next transformations
glMultMatrixf(Transformation); // apply your node matrix
//render your node, in your example it's surely a mesh
glPopMatrix();                 // restore the model matrix
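Put together, the per-node step usually ends up as a small recursive function like the one in the Assimp C sample. This is only a sketch of that idea in fixed-function OpenGL/C++ (renderMesh() stands in for your own mesh-drawing code and is assumed to exist), but it shows where the node matrix fits:
#include <assimp/scene.h>
#include <GL/gl.h>

void renderMesh(const aiMesh* mesh); // your own mesh-drawing code (assumed)

void renderNode(const aiScene* scene, const aiNode* node)
{
    glMatrixMode(GL_MODELVIEW);
    glPushMatrix();

    // aiMatrix4x4 is row-major while OpenGL expects column-major, so transpose before multiplying.
    aiMatrix4x4 m = node->mTransformation;
    m.Transpose();
    glMultMatrixf(reinterpret_cast<const float*>(&m));

    // draw all meshes attached to this node
    for (unsigned int i = 0; i < node->mNumMeshes; ++i)
        renderMesh(scene->mMeshes[node->mMeshes[i]]);

    // children inherit this node's transform because we have not popped yet
    for (unsigned int i = 0; i < node->mNumChildren; ++i)
        renderNode(scene, node->mChildren[i]);

    glPopMatrix();
}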

Load a .obj model with ASSIMP in DirectX9

This is my first time posting. I have an issue with the 3D model loading library called ASSIMP. I am trying to integrate it into a sample Direct3D 9 app, and it is not going so well. I am an experienced C++ programmer, so it shouldn't take too much hassle to help me :). I have made several D3D9 apps in the past and rendered manual primitives, but now I am trying to render an OBJ model loaded with ASSIMP. When I try to render it, NOTHING is rendered at all. This is very weird; not even one poly is rendered. It is VERY frustrating, as I have spent a week trying to fix this one problem and searching on Google returns no useful results. You guys are honestly my last hope lol. OK, so here is my code. Please take a look and help me understand what I am doing wrong. Also, if you know of a link to a DirectX 9 ASSIMP example, that would be appreciated, as Google only shows OpenGL :(. Any help will be much appreciated, thanks :)
bool Mesh::LoadMesh(const std::string& Filename)
{
    Assimp::Importer Importer;
    const aiScene *pScene = NULL;
    const aiMesh *pMesh = NULL;

    pScene = Importer.ReadFile(Filename.c_str(), aiProcess_Triangulate | aiProcess_ConvertToLeftHanded | aiProcess_ValidateDataStructure | aiProcess_FindInvalidData);
    if (!pScene)
    {
        printf("Error parsing '%s': '%s'\n", Filename.c_str(), Importer.GetErrorString());
        return false;
    }

    pMesh = pScene->mMeshes[0];
    if (!pMesh)
    {
        printf("Error Finding Model In file. Did you export an empty scene?");
        return false;
    }

    for (unsigned int i = 0; i < pMesh->mNumFaces; i++)
    {
        if (pMesh->mFaces[i].mNumIndices == 3)
        {
            m_NumIndices = m_NumIndices + 3;
        }
        else
        {
            printf("Error parsing Faces. Try to Re-Export model from 3d package!");
            return false;
        }
    }

    m_NumFaces = pMesh->mNumFaces;
    m_NumVertecies = pMesh->mNumVertices;

    ZeroMemory(&m_pVB, sizeof(m_pVB));
    m_pRenderDevice->CreateVertexBuffer(sizeof(Vertex) * m_NumVertecies, 0, VertexFVF, D3DPOOL_DEFAULT, &m_pVB, NULL);
    m_pVB->Lock(0, 0, (void**)&m_pVertecies, 0);

    for (int i = 0; i < pMesh->mNumVertices; i++)
    {
        Vertex *pvertex = new Vertex(D3DXVECTOR3(pMesh->mVertices[i].x, pMesh->mVertices[i].y, pMesh->mVertices[i].z), D3DXVECTOR2(pMesh->mTextureCoords[0][i].x, pMesh->mTextureCoords[0][i].y), D3DXVECTOR3(pMesh->mNormals[i].x, pMesh->mNormals[i].y, pMesh->mNormals[i].z));
        m_pVertecies[i] = pvertex;
    }

    m_pVB->Unlock();
    return true;
}
void Mesh::Render()
{
    m_pRenderDevice->SetStreamSource(0, m_pVB, 0, sizeof(Vertex));
    m_pRenderDevice->SetFVF(VertexFVF);
    m_pRenderDevice->DrawPrimitive(D3DPT_TRIANGLELIST, 0, m_NumFaces);
}

void Render()
{
    D3DCOLOR Color = D3DCOLOR_ARGB(255, 0, 0, 255);

    // Clear the Z and back buffers
    g_pRenderDevice->Clear(0, NULL, D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER, Color, 1.0f, 0);
    g_pRenderDevice->BeginScene();

    InitializeViewMatrix();

    D3DXMATRIX Scale;
    D3DXMatrixScaling(&Scale, CameraScaleX, CameraScaleY, CameraScaleZ);

    D3DXMATRIX Rotation;
    CameraRotX += 0.025;
    D3DXMatrixRotationYawPitchRoll(&Rotation, CameraRotX, CameraRotY, CameraRotZ);

    g_pRenderDevice->SetTransform(D3DTS_WORLD, &D3DXMATRIX(Scale * Rotation));

    if (pMesh)
    {
        pMesh->Render();
    }

    g_pRenderDevice->EndScene();
    g_pRenderDevice->Present(NULL, NULL, NULL, NULL);
}
I might be getting old, but I can't find anything wrong in this code. Are you sure your pointers are all pointing where they should?
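One place I would double-check is how the locked vertex buffer is filled: storing Vertex* pointers in it instead of the vertex data itself would leave the buffer full of garbage. A sketch of what I would expect that loop to look like, assuming a local Vertex* is used for the locked memory (names reused from your code):
Vertex* pVertices = NULL;
m_pVB->Lock(0, 0, (void**)&pVertices, 0);

for (unsigned int i = 0; i < pMesh->mNumVertices; i++)
{
    // copy the vertex data by value into the buffer; no heap allocation needed
    pVertices[i] = Vertex(
        D3DXVECTOR3(pMesh->mVertices[i].x, pMesh->mVertices[i].y, pMesh->mVertices[i].z),
        D3DXVECTOR2(pMesh->mTextureCoords[0][i].x, pMesh->mTextureCoords[0][i].y),
        D3DXVECTOR3(pMesh->mNormals[i].x, pMesh->mNormals[i].y, pMesh->mNormals[i].z));
}

m_pVB->Unlock();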