GLM lookAt: object outside of the screen - OpenGL

I have a cube with vertices at ±1 (1 1 1, 1 1 -1, etc.).
Currently I'm using only the view matrix, with the projection set to identity:
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
mat4 view = glm::lookAt(vec3(0,0,3),
vec3(0,0,0),
vec3(0,0,1));
mat4 mvp = view;
GLuint location = glGetUniformLocation(p_myGLSL->getProgramID(), "mvp");
glUniformMatrix4fv(location, 1, GL_FALSE, &mvp[0][0]);
glBindVertexArray(vaoHandle);
glDrawArrays(GL_TRIANGLES, 0, countV);
glFlush();
glutSwapBuffers();
Vertex shader code:
layout (location = 0) in vec3 VertexPosition;
uniform mat4 mvp;
void main()
{
gl_Position = mvp * vec4(VertexPosition,1.0);
}
VBO, VAO code:
// fill values into VBO
GLuint positionBufferHandle;
glGenBuffers(1, &positionBufferHandle);
glBindBuffer(GL_ARRAY_BUFFER, positionBufferHandle);
glBufferData(GL_ARRAY_BUFFER, countV*sizeof(float), positionData, GL_STATIC_DRAW);
// Vertex array object
glGenVertexArrays(1, &vaoHandle);
glBindVertexArray(vaoHandle);
// Read into vertex shader
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, positionBufferHandle);
glVertexAttribPointer(0,3,GL_FLOAT, GL_FALSE, 0, (GLubyte *) NULL);
glutMainLoop();
Why is it showing a blank screen?
P.S.: If I create the view matrix manually, it works:
mat4 translate = glm::translate(glm::mat4(1), vec3(-centerx, -centery, -centerz));
mat4 scale = glm::scale(glm::mat4(1), vec3(0.2, 0.2, 0.2));
mat4 mvp = scale * translate;

Use vec3(0,1,0) as the up vector. When you are looking at (0,0,0) from (0,0,3), you are looking in the (0,0,-1) direction. The up vector you pass to lookAt must not be parallel to that direction (ideally it is perpendicular to it), so vec3(0, 1, 0) works here. Change the view matrix assignment to:
mat4 view = glm::lookAt(vec3(0,0,3),
vec3(0,0,0),
vec3(0,1,0));
Also, add #version 430 (or whatever GLSL version you are targeting) to the beginning of your vertex shader. The layout qualifier on vertex shader inputs has been core since OpenGL 3.3 (GLSL 3.30), so the shader needs a #version directive at least that new.
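With the version directive added, the vertex shader from the question looks like this (430 is just an example; use whatever GLSL version your context actually supports):
#version 430
layout (location = 0) in vec3 VertexPosition;
uniform mat4 mvp;
void main()
{
    gl_Position = mvp * vec4(VertexPosition, 1.0);
}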

Related

OpenGL camera movement program vertex shader issue

So, I'm a beginner learning graphics programming. I'm working on a program for camera movement. I think there's something wrong with the vertex shader. The program runs with no errors, but the screen is completely blank. Here is the vertex shader I'm using:
#version 330
in vec4 vPosition;
out vec4 vColor;
uniform mat4 model_view;
uniform mat4 projection;
void main()
{
vec4 pos = projection * model_view * vPosition / vPosition.w;
gl_Position = pos;
vColor = vPosition;
}
If I switch the shader back to a basic version:
#version 330
in vec4 vPosition;
out vec4 vColor;
void
main()
{
gl_Position = vPosition;
vColor = vPosition;
}
The program runs and renders a triangle successfully. So, I'm pretty sure the error is with the shader.
The shader is called in the initialize function:
void initialize(void)
{
glClearColor(1.0, 1.0, 1.0, 1.0); // white background
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
// Create and initialize a buffer object
GLuint buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(points), points, GL_STATIC_DRAW);
// Load shaders and use the resulting shader program
GLuint program = InitShader("res/shaders/vshader21.glsl", "res/shaders/fshader21.glsl");
model_view = glGetUniformLocation(program, "model_view");
projection = glGetUniformLocation(program, "projection");
glUseProgram(program);
// Initialize the vertex position attribute from the vertex shader
GLuint loc = glGetAttribLocation(program, "vPosition");
glEnableVertexAttribArray(loc);
glVertexAttribPointer(loc, 2, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0));
}
The 'points' array passed to glBufferData is as follows:
const int WIDTH = 500, HEIGHT = 500;
/* Positions */
vec4 points[] = {
vec4(0.5,0.5, 1, 1),
vec4(-0.5,0.5, 1, 1),
vec4(0.5,-0.5, 1, 1) ,
vec4(-0.5,-0.5, 1, 1)
};
model_view and projection are global GLuint variables in the main application.
I set the uniform variables (projection, model_view) in the display function.
void display(void)
{
glClear(GL_COLOR_BUFFER_BIT); // clear the window
glPointSize(20.0);
// Projection transformation parameters
GLfloat left = -1.0, right = 1.0;
GLfloat bottom = -1.0, top = 1.0;
GLfloat zNear = 0, zFar = 3.0;
mat4 p = Ortho(left, right, bottom, top, zNear, zFar);
glUniformMatrix4fv(projection, 1, GL_TRUE, p);
vec4 eye(0.0, 0.0, -1.0, 1.0);
vec4 at(0.0, 0.0, 0.0, 1.0);
vec4 up(0.0, 1.0, 0.0, 0.0);
mat4 mv = LookAt(eye, at, up);
glUniformMatrix4fv(model_view, 1, GL_TRUE, mv);
glDrawArrays(GL_TRIANGLES, 0, 3); // draw the points
glFlush();
}
What could possibly be going wrong?
The explicit division by the .w component is superfluous. Change
vec4 pos = projection * model_view * vPosition / vPosition.w;
to
vec4 pos = projection * model_view * vPosition;
Note, the perspective divide is performed automatically after clipping.
Since the vector is multiplied by the matrix from the right in the shader, you do not have to transpose the matrices (assuming your mat4 type stores its elements in column-major order, which is what glUniformMatrix4fv expects with GL_FALSE). Change
glUniformMatrix4fv(projection, 1, GL_TRUE, p);
glUniformMatrix4fv(model_view, 1, GL_TRUE, mv);
to
glUniformMatrix4fv(projection, 1, GL_FALSE, p);
glUniformMatrix4fv(model_view, 1, GL_FALSE, mv);
See GLSL Programming/Vector and Matrix Operations
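Putting the shader change together, the corrected vertex shader is just the one from the question with the division removed:
#version 330
in vec4 vPosition;
out vec4 vColor;
uniform mat4 model_view;
uniform mat4 projection;
void main()
{
    // clip-space position; the divide by w is done automatically after clipping
    gl_Position = projection * model_view * vPosition;
    vColor = vPosition;
}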

Repeat UV for multiple cubes in one vertex buffer object (VBO)?

I am making a little voxel engine using a chunk system (like in Minecraft). I decided to use one VBO per chunk, so a VBO contains multiple cubes that will use different textures.
I currently have the UVs for a single cube, and I would like to use them on all cubes in a VBO, so the texture wraps every cube the same way it would if the cubes were in separate VBOs.
Here is what I'm actually getting:
How to tell OpenGL to do the same thing as the first cube on all cubes?
EDIT:
here are my shaders:
vertex shader
#version 400
layout(location = 0) in vec3 vertexPosition_modelspace;
layout(location = 1) in vec2 vertexUV;
out vec2 UV;
uniform mat4 MVP;
void main() {
gl_Position = MVP * vec4(vertexPosition_modelspace, 1);
UV = vertexUV;
}
fragment shader
#version 400
in vec2 UV;
out vec3 color;
uniform sampler2D textureSampler;
void main(){
color = texture(textureSampler, UV).rgb;
}
my glfw loop:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(programID);
glUniformMatrix4fv(MatrixID, 1, GL_FALSE, &chunkHandler.player.mvp[0][0]);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, grass);
glUniform1i(textureID, 0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, chunkHandler.loaded_chunks[0]->vboID);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, tboID);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);
glDrawArrays(GL_TRIANGLES, 0, chunkHandler.loaded_chunks[0]->nbVertices);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glfwSwapBuffers(window);
glfwPollEvents();
tboID: tbo is for texture buffer object
how i create the TBO:
glGenBuffers(1, &tboID);
glBindBuffer(GL_ARRAY_BUFFER, tboID);
glBufferData(GL_ARRAY_BUFFER, 36 * 2 * sizeof(float), uvcube, GL_STATIC_DRAW);
While not a complete answer, I can help with debugging.
It seems to me that the texture coordinates are wrong (you don't show the code that fills them).
In your fragment shader, I would output the U and V coordinate as colors:
#version 400
in vec2 UV;
out vec3 color;
uniform sampler2D textureSampler;
void main(){
color = vec3(UV.x, UV.y, 0.0);
}
If the coordinates are correct, you should have a gradient on each cube face (each cube vertex will be colored based on its UV, so a (0,0) vertex is black and (0,1) is green and so on).
If that's not the case, fix the texture coordinate values until they look correct.
This looks suspicious to me: glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);
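Not part of the original answer, but a possible way to address what looks suspicious there: the UV buffer (tboID) is filled with only 36 vertices' worth of coordinates (one cube), while the position VBO holds an entire chunk, so every cube after the first samples undefined UV data. A sketch, assuming each cube contributes 36 vertices and using a made-up nbCubes for the number of cubes in the chunk:
#include <vector>

// Repeat the per-cube UV pattern once for every cube in the chunk so the UV
// buffer lines up one-to-one with the position buffer.
std::vector<float> chunkUVs;
chunkUVs.reserve(nbCubes * 36 * 2);
for (int c = 0; c < nbCubes; ++c)
    chunkUVs.insert(chunkUVs.end(), uvcube, uvcube + 36 * 2);

glGenBuffers(1, &tboID);
glBindBuffer(GL_ARRAY_BUFFER, tboID);
glBufferData(GL_ARRAY_BUFFER, chunkUVs.size() * sizeof(float),
             chunkUVs.data(), GL_STATIC_DRAW);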

Rendering a model in OpenGL

I'm trying to render this model in OpenGL:
[image: the model in an image viewer]
This is my attempt so far:
[image: the model rendered by my code]
I only gave it the vertices so far, no normals or faces. I'm wondering why it doesn't look solid like the first one. Is it because of the lack of normals? I'm pretty sure I got the number of vertices/triangles right, but I'm not sure if I've made mistakes. What is the next step to fully render the model like the first image?
buffer creation:
//Vertex buffer
GLuint vertexbuffer;
glGenBuffers(1, &vertexbuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);
//color buffer
GLuint colorbuffer;
glGenBuffers(1, &colorbuffer);
glBindBuffer(GL_ARRAY_BUFFER, colorbuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(g_color_buffer_data), g_color_buffer_data, GL_STATIC_DRAW);
//create shaders and attach them to a program object
GLuint program = rigShadersToProgram();
GLuint matrixID = glGetUniformLocation(program, "MVP");
rendering loop:
// Rendering loop
while (!glfwWindowShouldClose(window))
{
//clear the screen
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
////////////////////////////matrix operations/////////////////////////////////////////
//projection matrix 45 degree FoV, 4:3 ratio, display range 0.1 - 100
glm::mat4 projection = glm::perspective(99.0f, 4.0f/3.0f, 0.1f, 100.0f);
//camera matrix
glm::mat4 view = glm::lookAt(
glm::vec3(2, 1, -1), //camera is at (2,1,-1)
glm::vec3(0, 0 , 0), //looks at origin
glm::vec3(0,1, 0) //head is up
);
//model matrix identity matrix
glm::mat4 model = glm::mat4(1.0f);
//rotate
model = glm::rotate(model, e, glm::vec3(1,2,3));
//model-view-projection
glm::mat4 MVP = projection * view * model;
//////////////////////////////////////////////////////////////////////////////////////
//use the compiled shaders
glUseProgram(program);
//send transformation matrix to currently bound shader
glUniformMatrix4fv(matrixID, 1, GL_FALSE, &MVP[0][0]);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
//vertex buffer
glVertexAttribPointer(
0, //index
3, //size
GL_FLOAT, //type
GL_FALSE, //normalized?
0, //stride
0 //array buffer offset
);
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, colorbuffer);
//color buffer
glVertexAttribPointer(
1, //index
3, //size
GL_FLOAT, //type
GL_FALSE, //normalized?
0, //stride
0 //array buffer offset
);
//draw triangle
glDrawArrays(GL_TRIANGLES, 0, 12722*3);
std::cout << glfwGetTime() << "\n";
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glfwSwapBuffers(window);
glfwPollEvents();
the model data is stored in an array:
static const GLfloat g_vertex_buffer_data[] = {
-1.557376f, 0.094970f, 0.171995f,
-1.565967f, 0.098142f, 0.171995f,
-1.557376f, 0.094970f, -0.048469f,
-1.565967f, 0.098142f, -0.048469f,
-1.532660f, 0.162907f, -0.048469f,
-1.541251f, 0.166079f, -0.048469f,
-1.444236f, 0.405840f, 0.171996f,
-1.452827f, 0.409013f, 0.171996f,
-1.463533f, 0.352575f, 0.171995f,
-1.472257f, 0.355747f, 0.171995f,
-1.528166f, 0.175331f, 0.011009f,
-1.536757f, 0.178371f, 0.011009f,
-1.538475f, 0.146781f, 0.025019f, ... etc
vertex shader:
#version 430 core
layout(location =0) in vec3 vpos;
layout(location =1) in vec3 vertexColor;
out vec3 fragmentColor;
uniform mat4 MVP;
void main(void)
{
//output position of the vertex in clip space MVP*position
gl_Position = MVP * vec4(vpos,1);
fragmentColor = vertexColor;
};
fragment shader:
#version 430 core
in vec3 fragmentColor;
out vec3 color;
void main()
{
color = fragmentColor;
};
You don't show the actual model loading or rendering code, but what's clear from the screenshot of your program is that your vertex indexing order is totally messed up. There are vertices connected to each other in your program's drawing that definitely are not connected in the model. Time to recheck the model loader code.
BTW: why did you hardcode the number of vertices to draw in the glDrawArrays call? That doesn't make sense at all.
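To make that point concrete, here is a minimal loading sketch, assuming the model is a Wavefront OBJ (an assumption; the question doesn't say). In an OBJ, the triangles come from the f lines, which index into the v lines, so uploading the raw v list and calling glDrawArrays on it scrambles the connectivity exactly like in the second screenshot.
#include <fstream>
#include <sstream>
#include <string>
#include <vector>
#include <glm/glm.hpp>

// Minimal OBJ loader sketch: "v x y z" lines give positions,
// "f a b c" lines give 1-based indices into those positions.
// Assumes plain "f a b c" faces (no "a/b/c" texture/normal indices).
std::vector<glm::vec3> loadObjTriangles(const char* path)
{
    std::vector<glm::vec3> positions, triangles;
    std::ifstream file(path);
    std::string line;
    while (std::getline(file, line)) {
        std::istringstream in(line);
        std::string tag;
        in >> tag;
        if (tag == "v") {
            glm::vec3 p;
            in >> p.x >> p.y >> p.z;
            positions.push_back(p);
        } else if (tag == "f") {
            unsigned a, b, c;
            in >> a >> b >> c;
            triangles.push_back(positions[a - 1]);
            triangles.push_back(positions[b - 1]);
            triangles.push_back(positions[c - 1]);
        }
    }
    return triangles;
}
The returned vector is what belongs in the vertex buffer, and triangles.size() is what glDrawArrays should receive instead of a hard-coded count.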

Issues drawing a line in OpenGL

I already have a program that can draw textured objects. I want to draw debug lines, so I tried to copy the same sort of drawing process I use for sprites to draw a line. I made a new fragment and vertex shader because lines aren't going to be textured and having a different debug shader could be useful.
The program keeps running when I try to draw a line, but the line never appears. I tried to write code similar to the working sprite code, but clearly I've missed something or made a mistake.
Vertex Shader:
#version 330 core
layout (location = 0) in vec2 position;
uniform mat4 uniformView;
uniform mat4 uniformProjection;
void main()
{
gl_Position = uniformProjection * uniformView * vec4(position, 0.0f, 1.0f);
}
Fragment Shader:
#version 330 core
out vec4 color;
uniform vec4 uniformColor;
void main()
{
color = uniformColor;
}
Drawing Code:
void debugDrawLine(glm::vec3 startPoint, glm::vec3 endPoint, glm::vec3 color, Shader debugShader)
{
GLint transformLocation, colorLocation;
GLfloat lineCoordinates[] = { startPoint.x, startPoint.y,
endPoint.x, endPoint.y};
GLuint vertexArray, vertexBuffer;
glLineWidth(5.0f);
glGenVertexArrays(1, &vertexArray);
glGenBuffers(1, &vertexBuffer);
glBindVertexArray(vertexArray);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
debugShader.Use();
//Copies the Vertex data into the buffer
glBufferData(GL_ARRAY_BUFFER, sizeof(lineCoordinates), lineCoordinates, GL_STATIC_DRAW);
glVertexAttribPointer(
0, // attribute 0. No particular reason for 0, but must match the layout in the shader.
2, // size
GL_FLOAT, // type
GL_FALSE, // normalized?
2*sizeof(GL_FLOAT), // stride
(GLvoid*)0 // array buffer offset
);
//Sends the sprite's color information in the the shader
colorLocation = glGetUniformLocation(debugShader.Program, "uniformColor");
glUniform4f(colorLocation, 1.0f, color.x, color.y, color.z);
//Activates Vertex Position Information
glEnableVertexAttribArray(0);
// Draw the line
glDrawArrays(GL_LINES, 0, 2);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
}
Line widths greater than 1 are not supported in the core profile. Does the line render if you set the width to 1?
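A quick check (my suggestion, not part of the original comment): request width 1.0 and see whether the earlier wide-line request left an error behind.
#include <iostream>  // for the error printout

glLineWidth(1.0f);              // 1.0 is the only line width the core profile guarantees
GLenum err = glGetError();      // a preceding glLineWidth(5.0f) may have queued GL_INVALID_VALUE
if (err != GL_NO_ERROR)
    std::cout << "GL error after glLineWidth: " << err << std::endl;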

Drawing Multiple Models in OpenGL 3.x/4.x

I'm working on a rendering engine and so far it's been going great, but I cannot figure out why, when I make two draw calls to render different models, only one shows up.
I'm using wxWidgets to handle the window system; the code in question is pasted below. Any suggestions?
Main Rendering Loop
TestShader.Activate();
glUseProgram(TestShader.Program);
ProjectionMatrix = glm::perspective(45.0f, 4.0f / 3.0f, 0.1f, 100.0f);
int projectionMatrixLocation = glGetUniformLocation(TestShader.Program, "ProjectionMatrix");
glUniformMatrix4fv(projectionMatrixLocation, 1, GL_FALSE, glm::value_ptr(ProjectionMatrix));
glm::mat4 ViewMatrix = glm::lookAt(
glm::vec3(position),
glm::vec3(position+direction),
glm::vec3(up)
);
int viewMatrixLocation = glGetUniformLocation(TestShader.Program, "ViewMatrix");
glUniformMatrix4fv(viewMatrixLocation, 1, GL_FALSE, glm::value_ptr(ViewMatrix));
TestModel[1].Draw(TestShader, glm::vec3(0,0,-11));
TestModel[0].Draw(TestShader, glm::vec3(0,0,-1));
Refresh(false);
Model Drawing Function
void E_MODEL::Draw(EShader Shader, glm::vec3 Location)
{
if (!Registered) glGenVertexArrays(1, &VAO[0]);
glBindVertexArray(VAO[0]);
if (!Registered) glGenBuffers(1, &VBO[0]);
glBindBuffer(GL_ARRAY_BUFFER, VBO[0]);
if (!Registered) glBufferData(GL_ARRAY_BUFFER, Vertices.size() * sizeof(glm::vec3), &Vertices[0], GL_STATIC_DRAW);
if (!Registered) glGenBuffers(1, &VBO[1]);
glBindBuffer(GL_ARRAY_BUFFER, VBO[1]);
if (!Registered) glBufferData(GL_ARRAY_BUFFER, Normals.size() * sizeof(glm::vec3), &Normals[0], GL_STATIC_DRAW);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, VBO[0]);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, VBO[1]);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glm::mat4 modelMatrix = glm::translate(glm::mat4(1.0f), Location);
int modelMatrixLocation = glGetUniformLocation(Shader.Program, "modelMatrix");
glUniformMatrix4fv(modelMatrixLocation, 1, GL_FALSE, glm::value_ptr(modelMatrix));
glDrawArrays( GL_TRIANGLES, 0, Vertices.size() );
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glBindVertexArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
Registered = true;
}
Vertex Shader
#version 330 core
// Input vertex data, different for all executions of this shader.
layout(location = 0) in vec3 vertexPosition_modelspace;
layout(location = 1) in vec3 vertexColor;
// Output data ; will be interpolated for each fragment.
out vec3 fragmentColor;
// Values that stay constant for the whole mesh.
uniform mat4 ProjectionMatrix;
uniform mat4 ViewMatrix;
uniform mat4 modelMatrix;
void main(){
gl_Position = ProjectionMatrix * ViewMatrix * modelMatrix * vec4(vertexPosition_modelspace,1);
// The color of each vertex will be interpolated
// to produce the color of each fragment
fragmentColor = vertexColor;
}
Fragment Shader
#version 330 core
// Interpolated values from the vertex shaders
in vec3 fragmentColor;
// Ouput data
out vec3 color;
void main(){
// Output color = color specified in the vertex shader,
// interpolated between all 3 surrounding vertices
color = fragmentColor;
}
glClear clears a buffer. With the GL_COLOR_BUFFER_BIT flag, you are clearing the color buffer. With the GL_DEPTH_BUFFER_BIT flag, you are clearing the depth buffer. By doing this each time you draw a model, the color and depth information written previously is cleared. This means you are erasing the image every time you draw a model.
You typically clear the buffers once per "paint" or "present". That is, you clear once, you draw N times, you present once. What you are doing now is clear, draw, clear, draw... present.
TL;DR: Call glClear once, before all of your drawing, not before each draw.
Is it the glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); inside the draw function? Maybe move that out so it runs before you call the draw functions?
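Concretely (a sketch of the code from the question with only this change): remove the glClear call from E_MODEL::Draw and clear once at the top of the main rendering loop.
// Main rendering loop: clear once per frame, then draw every model.
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

TestShader.Activate();
glUseProgram(TestShader.Program);
// ... upload ProjectionMatrix and ViewMatrix exactly as before ...
TestModel[1].Draw(TestShader, glm::vec3(0, 0, -11));   // Draw no longer calls glClear
TestModel[0].Draw(TestShader, glm::vec3(0, 0, -1));
Refresh(false);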