lwjgl model is not rotating at its center - opengl

I have an entity(triangle),when am trying to rotate its rotating in circular motion.When i applied the projection and transformation matrix its not centered either,it is to the right by some distance from center, i dont even know why its not centered
transformation matrix along with vertices
float[] vertices = {
1409.598f, -58.85f, 1471.946f,
1460.572f, -58.9047f, 1462.047f,
1408.506f, -20.5531f, 1471.137f
};
public static Matrix4f createTransformationMatrix(Vector3f entity, float rx, float ry,
float rz, float scale) {
Matrix4f matrix = new Matrix4f();
matrix.setIdentity();
Matrix4f.translate(entity, matrix, matrix);
Matrix4f.rotate((float) Math.toRadians(rx), new Vector3f(1,0,0), matrix, matrix);
Matrix4f.rotate((float) Math.toRadians(ry), new Vector3f(0,1,0), matrix, matrix);
Matrix4f.rotate((float) Math.toRadians(rz), new Vector3f(0,0,1), matrix, matrix);
Matrix4f.scale(new Vector3f(scale,scale,scale), matrix, matrix);
return matrix;
}
Any help ?
Projection matrix:
private static final float FOV = 70;
private static final float NEAR_PLANE = 0.1f;
private static final float FAR_PLANE = 10000;
private void createProjectionMatrix() {
float aspectRatio = (float) Display.getWidth() / (float) Display.getHeight();
float y_scale = (float) ((1f / Math.tan(Math.toRadians(FOV / 2f))) * aspectRatio);
float x_scale = y_scale / aspectRatio;
float frustum_length = FAR_PLANE - NEAR_PLANE;
projectionMatrix = new Matrix4f();
projectionMatrix.m00 = x_scale;
projectionMatrix.m11 = y_scale;
projectionMatrix.m20 = 0f;
projectionMatrix.m21 = 0f;
projectionMatrix.m22 = -((FAR_PLANE + NEAR_PLANE) / frustum_length);
projectionMatrix.m23 = -1;
projectionMatrix.m32 = -((2 * NEAR_PLANE * FAR_PLANE) / frustum_length);
projectionMatrix.m33 = 0;
}
View Matrix:
public static Matrix4f createViewMatrix(Camera camera) {
Matrix4f viewMatrix = new Matrix4f();
viewMatrix.setIdentity();
Matrix4f.rotate((float) Math.toRadians(camera.getPitch()), new Vector3f(1, 0, 0), viewMatrix,
viewMatrix);
Matrix4f.rotate((float) Math.toRadians(camera.getYaw()), new Vector3f(0, 1, 0), viewMatrix, viewMatrix);
Vector3f cameraPos = camera.getPosition();
Vector3f negativeCameraPos = new Vector3f(-cameraPos.x, -cameraPos.y, -cameraPos.z);
Matrix4f.translate(negativeCameraPos, viewMatrix, viewMatrix);
return viewMatrix;
}
Vertex Shader:
#version 400 core
in vec3 position;
uniform mat4 transformationMatrix;
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
void main(void){
gl_Position = projectionMatrix * viewMatrix * transformationMatrix * vec4(position,1.0);
}

I think you have an error in projection matrix. Use this line:
float y_scale = 1f / Math.tan(Math.toRadians(FOV / 2f));

Related

Rotating a 3D cube using opengl glm

I create a cube like normal using 8 vertex points that outline a cube and use indices to draw each individual triangle. However, when I create my camera matrix and rotate it using the lookat function with glm it rotates the entire screen positions not world positions.
glm::mat4 Projection = glm::mat4(1);
Projection = glm::perspective(glm::radians(60.0f), (float)window_width / (float)window_hight, 0.1f, 100.0f);
const float radius = 10.0f;
float camX = sin(glfwGetTime()) * radius;
float camZ = cos(glfwGetTime()) * radius;
glm::mat4 View = glm::mat4(1);
View = glm::lookAt(
glm::vec3(camX, 0, camZ),
glm::vec3(0, 0, 0),
glm::vec3(0, 1, 0)
);
glm::mat4 Model = glm::mat4(1);
glm::mat4 mvp = Projection * View * Model;
Then in glsl:
uniform mat4 camera_mat4
void main()
{
vec4 pos = vec4(vertexPosition_modelspace, 1.0) * camera_mat4;
gl_Position.xyzw = pos;
}
Example: GLM rotating screen coordinates not cube

How do you calculate a perspective projection matrix?

Right now I have the ability to scale, rotate, and translate points by using a matrix.
// I use a left to right multiplying style (scale, rotate, then translate)
Matrix model = Matrix::Scale(0.4f) * Matrix::Rotation(45.0f, Vector3(0.0f, 0.0f, 1.0f)) * Matrix::Translation(Vector3(0.0f, 0.5f)).Transposed();
// vertex shader code
#version 460 core
layout (location = 0) in vec3 vertexPosition;
uniform mat4 model;
void main() {
gl_Position = model * vec4(vertexPosition, 1.0);
}
The main problem I'm having is creating a perspective projection matrix.
static Matrix Projection(float verticalFoV, float aspectRatio, float zNear, float zFar) {
// is this even correct?
float yScale = (1.0f / tan(verticalFoV / 2.0f)) * aspectRatio;
float xScale = yScale / aspectRatio;
float frustumLength = zFar - zNear;
return Matrix({
xScale, 0, 0, 0,
0, yScale, 0, 0,
0, 0, -((zFar + zNear) / frustumLength), -((2.0f * zNear * zFar) / frustumLength),
0, 0, -1.0f, 0
});
}
Which would then be used like this.
Matrix projection = Matrix::Projection(70.0f * DegreesToRadians, screenWidth / screenHeight, 0.1f, 100.0f);
I send over the matrices without transposing them.
glUniformMatrix4fv(glGetUniformLocation(shaderProgram, "model"), 1, false, &model[0][0]);
glUniformMatrix4fv(glGetUniformLocation(shaderProgram, "projection"), 1, false, &projection[0][0]);
And I want to be able to multiply them left to right in the vertex shader.
#version 460 core
layout (location = 0) in vec3 vertexPosition;
uniform mat4 model;
uniform mat4 projection;
void main() {
// I'm eventually gonna add view so it'd look like this
// gl_Position = model * view * projection * vec4(vertexPosition, 1.0);
gl_Position = model * projection * vec4(vertexPosition, 1.0);
}
P.S: I want to use a left handed coordinate system. (Right = +X, Up = +Y, Forward = +Z)
OpenGL matrices are stored with column major order. Your matrices are stored with row major order. Hence, you have to multiply the matrices to the vector form the right:
gl_Position = model * projection * vec4(vertexPosition, 1.0);
gl_Position = vec4(vertexPosition, 1.0) * model * projection;

Can't properly produce a ViewProjection matrix

I have a camera class that uses the DirectXMath API:
__declspec(align(16)) class Camera
{
public:
XMVECTOR Translation;
XMMATRIX Rotation;
XMVECTOR Scale;
XMMATRIX Transform;
XMFLOAT3 RotAngles;
XMMATRIX ProjectionMatrix;
float Width;
float Height;
float NearZ;
float FarZ;
float AspectRatio;
float FieldOfView;
Camera()
{
Translation = XMVectorZero();
Rotation = XMMatrixIdentity();
Scale = XMVectorSplatOne();
Transform = XMMatrixIdentity();
Width = 800;
Height = 600;
NearZ = 0.1f;
FarZ = 100.0f;
AspectRatio = 800 / 600;
FieldOfView = (XM_PIDIV4);
ProjectionMatrix = XMMatrixPerspectiveFovLH(FieldOfView, AspectRatio, NearZ, FarZ);
}
void Update()
{
Rotation = XMMatrixRotationRollPitchYaw(RotAngles.x, RotAngles.y, RotAngles.z);
XMMATRIX scaleM = XMMatrixScalingFromVector(Scale);
XMMATRIX translationM = XMMatrixTranslationFromVector(Translation);
Transform = scaleM * Rotation * translationM;
}
XMMATRIX GetViewMatrix()
{
XMVECTOR Eye;
XMVECTOR At;
XMVECTOR Up;
Eye = Translation;
At = Translation + Transform.r[2];
Up = Transform.r[1];
return(XMMatrixLookAtLH(Eye, At, Up));
}
XMMATRIX GetViewProjectionMatrix()
{
return(XMMatrixTranspose(GetViewMatrix() * ProjectionMatrix));
}
};
When I store the result of GetViewProjectionMatrix() in a XMFLOAT4X4 and update it to the constant buffer, the geometry gets torn apart or doesn't show up at all when I move/rotate the camera with the keyboard.I have isolated the camera to be issue with the deforming/disappearing geometry, but I have no idea what the problem is.I mean the projection matrix can't be wrong, it's just 1 function call, so it's most likely the view matrix.Could someone tell me where the issue is?I tried different combinations of multiplication orders/transposing both/transposing only one/anything.It never works properly.
In case anyone sees this question again:
It seems that OP did not transpose to ViewProjection matrix they generated. Note that DirectXMath works in row-major order while HLSL defaults to column-major. As per the documentation at - https://msdn.microsoft.com/en-us/library/windows/desktop/bb509634(v=vs.85).aspx

Opengl 3.3+ incorrect shadows when using shadow mapping

I am trying to implement shadow mapping on my landscape editor with OpenGL 3.3+. Using a few tutorials I have managed to get my code to compile and run but the whole landscape is in shadow except for the back row of my landscape grid (smallest z).
I am currently using the same projection, view and model matrices for my light as the camera (negative z is furthest from the camera).
Initialisation of my projection, view and model matrices (from LWJGL matrix tutorial):
modelPos = new Vector3f(0f, 0f, -20f);
modelAngle = new Vector3f(15f, 0f, 0f);
modelScale = new Vector3f(1f, 1f, 1f);
cameraPos = new Vector3f(-50f, 0f, -120f);
projectionMatrix = new Matrix4f();
float fieldOfView = 120f;
float aspectRatio = (float)width / (float)height;
float near_plane = 0.01f;
float far_plane = 100f;
float y_scale = DepthMatrixUtility.coTangent(DepthMatrixUtility.degreesToRadians(fieldOfView / 2f));
float x_scale = y_scale / aspectRatio;
float frustum_length = far_plane - near_plane;
projectionMatrix.m00 = x_scale;
projectionMatrix.m11 = y_scale;
projectionMatrix.m22 = -((far_plane + near_plane) / frustum_length);
projectionMatrix.m23 = -1;
projectionMatrix.m32 = -((2 * near_plane * far_plane) / frustum_length);
Binding my matrices when displaying scene:
Matrix4f.translate(cameraPos, viewMatrix, viewMatrix);
Matrix4f.scale(modelScale, modelMatrix, modelMatrix);
Matrix4f.translate(modelPos, modelMatrix, modelMatrix);
Matrix4f.rotate(DepthMatrixUtility.degreesToRadians(modelAngle.z), new Vector3f(0, 0, 1), modelMatrix, modelMatrix);
Matrix4f.rotate(DepthMatrixUtility.degreesToRadians(modelAngle.y), new Vector3f(0, 1, 0), modelMatrix, modelMatrix);
Matrix4f.rotate(DepthMatrixUtility.degreesToRadians(modelAngle.x), new Vector3f(1, 0, 0), modelMatrix, modelMatrix);
matrix = new Matrix4f();
Matrix4f.mul(matrix, projectionMatrix, matrix);
Matrix4f.mul(matrix, viewMatrix, matrix);
Matrix4f.mul(matrix, modelMatrix, matrix);
matrix.store(matrix44Buffer);
matrix44Buffer.flip();
matrixLocation = GL20.glGetUniformLocation(pId, "matrix");
GL20.glUniformMatrix4(matrixLocation, false, matrix44Buffer);
I have tested my FBO with storing colour in the fragment shader, the height map displays correctly (I drew the FBO texture to a small quad in the corner of my screen) and updates as I alter the height map.
I then modified my FBO to store the depth to a texture on the first pass:
depthTexture = GL11.glGenTextures();
GL11.glBindTexture(GL11.GL_TEXTURE_2D, depthTexture);
GL11.glTexImage2D(GL11.GL_TEXTURE_2D, 0, GL11.GL_DEPTH_COMPONENT, Window.getScreenWidth(), Window.getScreenHeight(), 0, GL11.GL_DEPTH_COMPONENT, GL11.GL_UNSIGNED_BYTE, (ByteBuffer)null);
GL11.glTexParameterf(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_WRAP_S, GL11.GL_NEAREST);
GL11.glTexParameterf(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_WRAP_T, GL11.GL_NEAREST);
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MAG_FILTER, GL11.GL_LINEAR);
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MIN_FILTER, GL11.GL_LINEAR);
GL11.glBindTexture(GL11.GL_TEXTURE_2D, 0);
fboId = GL30.glGenFramebuffers();
GL30.glBindFramebuffer(GL30.GL_FRAMEBUFFER, fboId);
GL11.glDrawBuffer(GL11.GL_NONE);
GL11.glReadBuffer(GL11.GL_NONE);
GL32.glFramebufferTexture(GL30.GL_FRAMEBUFFER, GL30.GL_DEPTH_ATTACHMENT, depthTexture, 0);
verifyFBO();
My vertex shader for the first pass (Creating the shadow map):
#version 330 core
uniform mat4 matrix;
in vec4 in_Position;
void main(void)
{
gl_Position = matrix * in_Position;
}
My fragment shader for the first pass:
#version 330 core
layout(location = 0) out float fragmentdepth;
void main(void)
{
fragmentdepth = gl_FragCoord.z;
}
My bias matrix:
[0.5f, 0.0f, 0.0f, 0.0f]
[0.0f, 0.5f, 0.0f, 0.0f]
[0.0f, 0.0f, 0.5f, 0.0f]
[0.5f, 0.5f, 0.5f, 1.0f]
My vertex shader for the second pass (rendering the scene using the shadow map):
void main(void)
{
gl_Position = matrix * in_Position;
ShadowCoord = biasMatrix * lightMatrix * in_Position;
}
My fragment shader for the second pass:
if (texture(shadowMap, ShadowCoord.xy).z < ShadowCoord.z)
{
vec4 colour = 0.5 * out_Colour;
out_Colour = new vec4(colour[0], colour[1], colour[2], 1.0f);
}
After transforming in_Position with lightMatrix, the result is not projected on screen yet.
Actual perspective projection is applied by dividing by w component.
The perspective division will give you texture coordinates and depth in [-1,1] range.
At this point you use biasMatrix to transform them to [0,1] range.
So you shouldn't multiply by biasMatrix, then in your shader before the line
if (texture(shadowMap, ShadowCoord.xy).z < ShadowCoord.z)
add
ShadowCoord.xyz /= ShadowCoord.w;
ShadowCoord = biasMatrix * ShadowCoord;
The biasMatrix content you're showing should be stored transposed in memory. If you hesitate replace the matrix product with
ShadowCoord.xyz = ShadowCoord.xyz * .5f + float3(.5f);

Perspective projection matrix in OpenGL

I an new to perspective division phenomenon. I am rendering a simple square in 3D space using following code :
void MatrixPersp(Mat4& matrix,const float fovy, const float aspect, const float zNear, const float zFar)
{
float sine, cotangent, deltaZ;
float radians;
for(int i=0;i<4;i++)
for(int j=0;j<4;j++)
{
if(i==j)
matrix[i][j]=1.0;
else
matrix[i][j]=0.0;
}
radians = fovy / 2 * GLES_Pi / 180;
deltaZ = zFar - zNear;
sine = (float) sin(radians);
cotangent = (float) cos(radians) / sine;
matrix[0][0] = cotangent / aspect;
matrix[1][1] = cotangent;
matrix[2][2] = -(zFar + zNear) / deltaZ;
matrix[2][3] = -1;
matrix[3][2] = -2 * zNear * zFar / deltaZ;
matrix[3][3] = 0;
return;
}
void Render()
{
GLfloat vertices[] =
{
-0.8,0.6,1.0,1.0,
-0.8,0.2,1.0,1.0,
0.2,0.2,1.0,1.0,
0.2,0.6,1.0,1.0
};
MatrixPersp(perspective,90.0,aspect,2.0,100.0);
glUniformMatrix4fv(glGetUniformLocation(program_object,"MVPMatrix"), 1, GL_FALSE, (GLfloat*)perspective);
glClearDepth(1.0f);
glClear(GL_DEPTH_BUFFER_BIT);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);
glDrawElements();
}
Vertex Shader is :
#version 150
in vec4 position;
uniform mat4 MVPMatrix;
void main()
{
gl_Position = position*MVPMatrix;
}
The problem here is that , when all the four vertices have same z-value nothing is rendered at all. On the other hand if two vertices have -1 as z-coordinate the projection matrix works fine.
I am not sure what is going wrong here.
glUniformMatrix4fv(glGetUniformLocation(program_object,"MVPMatrix"), 1, GL_FALSE, (GLfloat*)perspective);
This line suggests that your matrix is in column-major order. This is confirmed by the way your MatrixPersp function computes its matrix, depending on exactly how Mat4 is defined.
gl_Position = position*MVPMatrix;
If MVPMatrix is properly column-major, then this multiplication is backwards. The position goes on the right.