I have a very basic OpenGL-based engine that can render polygons in any color to a resizable window. I'm now trying to implement matrices for my shaders by passing the model, view, and projection matrices to the shader as uniforms. Before adding the uniforms everything worked as it should; I could even pass in a uniform vec2 to simulate a light source at my mouse position. The uniform mat4s, however, don't work as well as the vec2s.
For debugging purposes I'm only rendering one yellow square centered on the screen. When using no uniforms the square shows up as expected. I now try passing in one mat4, set to an identity matrix, and in the vertex shader I multiply the vertex position by that uniform matrix. When I run the program it only shows a black window.
I've tried manually creating an identity matrix in the vertex shader and multiplying the position by that matrix instead of the uniform one. When I do that, my yellow square shows up as normal. This leads me to believe that the uniform mat4 doesn't get the correct values; however, I don't know how to check the values of the matrix while it's being used in the shader.
This is how my vertex shader looks:
#version 430 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec4 color;
out vec4 Color;
uniform mat4 model;
uniform mat4 view;
uniform mat4 project;
void main()
{
mat4 id;
id[0] = vec4(1.0, 0.0, 0.0, 0.0);
id[1] = vec4(0.0, 1.0, 0.0, 0.0);
id[2] = vec4(0.0, 0.0, 1.0, 0.0);
id[3] = vec4(0.0, 0.0, 0.0, 1.0);
Color = color;
gl_Position = id * vec4(position, 1.0);
}
The mat4 id is the manually created identity matrix; when I change id * to model * I get the black window.
This is how my fragment shader looks:
#version 430 core
in vec4 Color;
out vec4 outColor;
void main()
{
outColor = Color;
}
The shader is initialized by this code:
m_shaderID = glCreateProgram();
const char* vertexSource = ReadFile::readFile(vertpath);
const char* fragmentSource = ReadFile::readFile(fragpath);
GLint status;
// Vertex Shader
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexSource, NULL);
glCompileShader(vertexShader);
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &status);
if (status != GL_TRUE)
{
std::cout << "Failed to compile vertex shader!\nInfo log: " << std::endl;
char buffer[512];
glGetShaderInfoLog(vertexShader, 512, NULL, buffer);
std::cout << buffer << std::endl;
glDeleteShader(vertexShader);
}
// Fragment Shader
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentSource, NULL);
glCompileShader(fragmentShader);
glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &status);
if (status != GL_TRUE)
{
std::cout << "Failed to compile fragment shader!\nInfo log: " << std::endl;
char buffer[512];
glGetShaderInfoLog(fragmentShader, 512, NULL, buffer);
std::cout << buffer << std::endl;
glDeleteShader(fragmentShader);
}
// Shader program
glAttachShader(m_shaderID, vertexShader);
glAttachShader(m_shaderID, fragmentShader);
glLinkProgram(m_shaderID);
glValidateProgram(m_shaderID);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
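The link status of the program is not checked anywhere; a check in the same style as the compile checks above could look like this (a sketch only, reusing the status variable and logging style from the code above):
glGetProgramiv(m_shaderID, GL_LINK_STATUS, &status);
if (status != GL_TRUE)
{
    char buffer[512];
    glGetProgramInfoLog(m_shaderID, 512, NULL, buffer);
    std::cout << "Failed to link shader program!\nInfo log: " << buffer << std::endl;
}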
The matrix is passed to the shader as a uniform by this code:
void Shader::setUniformmat4(const GLchar* name, glm::mat4 matrix)
{
for (int i = 0; i <= 3; i++)
printf("%f, %f, %f, %f\n", matrix[i].x, matrix[i].y, matrix[i].z, matrix[i].w);
glUniformMatrix4fv(glGetUniformLocation(m_shaderID, name), 1, GL_FALSE, glm::value_ptr(matrix));
}
The printf is there to check the values of the matrix as they are passed to the uniform; at that point they are the values of an identity matrix.
The function setUniformmat4 is called by this code:
glm::mat4 model = glm::mat4(1.0);
shader.setUniformmat4("model", model);
When creating a lighting effect I create the uniform by calling this function:
void Shader::setUniformvec2(const GLchar* name, glm::vec2 vector)
{
glUniform2f(glGetUniformLocation(m_shaderID, name), vector.x, vector.y);
}
via this piece of code:
shader.setUniformvec2("light_pos", glm::vec2((x / window.getWidth()) * 2.0 - 1.0, 1.0 - 2.0 * (y / window.getHeight())));
where x and y are the mouse's coordinates. I then add the line
uniform vec2 light_pos;
to the fragment shader. This works without a problem, and it traces the mouse perfectly. The function used for setting the uniform mat4 looks the same as the function for setting the uniform vec2; the only difference is the Matrix4fv call for the mat4 versus the 2f call for the vec2.
As you can see, I'm using glm for the matrices and vectors.
My main function looks like this:
Window window(720, 720, "Window");
Shader shader("shader.vert", "shader.frag");
glm::mat4 model = glm::mat4(1.0);
shader.setUniformmat4("model", model);
Renderer* renderer = new Renderer();
std::vector<StaticSprite*> sprites;
sprites.push_back(new StaticSprite(-0.5, -0.5, 0.0, 1.0, 1.0, glm::vec4(1.0, 1.0, 0.0, 1.0), &shader));
while (!window.closed())
{
window.clear();
shader.enable();
double x, y;
window.getMousePosition(x, y);
shader.setUniformvec2("light_pos", glm::vec2((x / window.getWidth()) * 2.0 - 1.0, 1.0 - 2.0 * (y / window.getHeight())));
for (StaticSprite* sprite : sprites)
renderer->submit(sprite);
renderer->flush();
shader.disable();
window.update();
}
return 0;
To summarize my question: why are the values of the uniform mat4 not correct, is there any way to find out what those values are, and what should I change in the code to make the uniform mat4s work?
Please ask for any additional information needed to give an answer, I will happily provide anything I forgot to include.
The glUniform* functions specify the value of a uniform variable for the current program object. This means the program has to be installed with glUseProgram before the uniform is set:
Shader shader("shader.vert", "shader.frag");
shader.enable(); // <--- this is missing
glm::mat4 model = glm::mat4(1.0);
shader.setUniformmat4("model", model);
Active program resources can be queried from a program object even if it is not the "current" program (e.g. with glGetUniformLocation); note that the program object is a parameter of glGetUniformLocation. But to set the value of a uniform with glUniform*, the program has to be the currently installed program.
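As for checking what the uniform actually contains ("is there any way to find out what those values are"): once the program is linked, the stored value can be read back with glGetUniformfv. A minimal sketch, using the same m_shaderID and printf-style output as the existing setUniformmat4 helper:
// read the current value of the "model" uniform back from the program object
GLfloat values[16];
GLint loc = glGetUniformLocation(m_shaderID, "model");
glGetUniformfv(m_shaderID, loc, values);
for (int i = 0; i < 4; i++)
    printf("%f, %f, %f, %f\n", values[4*i], values[4*i+1], values[4*i+2], values[4*i+3]);
Alternatively, if the context is OpenGL 4.1 or newer, glProgramUniformMatrix4fv takes the program object as its first argument and does not require the program to be current:
glProgramUniformMatrix4fv(m_shaderID, glGetUniformLocation(m_shaderID, name), 1, GL_FALSE, glm::value_ptr(matrix));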
Related
When I want to use gluLookAt and have the shaders on, it doesn't move the "camera"; when I turn the shaders off, it works correctly.
Is there something missing in my shaders? I can't figure out what.
void display(void)
{
glClear(GL_COLOR_BUFFER_BIT);
glClearColor(0.8, 0.8, 0.8, 0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(0.5,0,1,0.5,0,0,0,1,0);
glColor3f(0, 0, 0);
glDrawArrays(GL_TRIANGLES, 0, 6);
glDrawArrays(GL_LINES, 6, 6);
glDrawArrays(GL_TRIANGLES, 12,6);
glDrawArrays(GL_LINES, 18, 4);
glDrawArrays(GL_TRIANGLES, 22, 6);
glColor3f(1, 0.7, 0);
glDrawArrays(GL_TRIANGLES, 28, 6);
glFlush();
}
Vertex Shader:
#version 450 core // 420, 330 core , compatibility
in vec4 position;
out vec4 Color;
void main()
{
gl_Position = position;
}
Fragment Shader:
#version 450 core // 420, 330 core , compatibility
in vec4 Color;
layout(location=0) out vec4 fColor;
void main()
{
fColor = vec4(0,0,0,0);
}
Move the "camera" to where i want it to be with shaders on
When you use a shader program, the vertex coordinate attributes are not magically processed by the current fixed-function matrices; the shader program has to do the transformation of the vertex coordinates itself.
You have two possibilities. Either you use a compatibility profile context and a lower GLSL version (e.g. 1.10); then you can use the built-in uniform gl_ModelViewProjectionMatrix (see the GLSL 1.10 specification) and the fixed-function matrix stack will work:
#version 110
attribute vec4 position;
// varying vec4 Color;
void main()
{
// ...
gl_Position = gl_ModelViewProjectionMatrix * position;
}
But note that this has been deprecated for decades; see Fixed Function Pipeline and Legacy OpenGL. Instead, I recommend using a library like OpenGL Mathematics (GLM) to calculate the view matrix with lookAt() and pass it in through a uniform variable:
#version 450 core // 420, 330 core , compatibility
in vec4 position;
// out vec4 Color;
layout(location = 7) uniform mat4 view_matrix;
void main()
{
gl_Position = view_matrix * position;
}
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
// [...]
{
// [...]
glUseProgram(program);
glm::mat4 view = glm::lookAt(
glm::vec3(0.5f, 0.0f, 1.0f), glm::vec3(0.5f, 0.0f, 0.0f), glm::vec3(0.0f, 1.0f, 0.0f));
glUniformMatrix4fv(7, 1, GL_FALSE, glm::value_ptr(view));
// [...]
}
The uniform location is set explicitly by a layout qualifier (location = 7). glUniformMatrix4fv sets the value of the uniform at the specified location in the default uniform block; this has to be done after the program has been installed with glUseProgram.
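If an explicit uniform location can't be used (the layout(location = 7) qualifier requires GLSL 4.3 or the ARB_explicit_uniform_location extension), the location can be queried by name instead. A minimal sketch, assuming the shader simply declares uniform mat4 view_matrix; and program is the linked program object from the snippet above:
glUseProgram(program);
GLint viewLoc = glGetUniformLocation(program, "view_matrix");
glUniformMatrix4fv(viewLoc, 1, GL_FALSE, glm::value_ptr(view));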
When I try to link my vertex and fragment shaders into a program, WebGL throws "Varyings with the same name but different type, or statically used varyings in fragment shader are not declared in vertex shader: textureCoordinates".
I have varying vec2 test in both my vertex and fragment shaders, and can't see any reason why the compiler wouldn't be able to find the same varying in both.
Vertex Shader:
varying vec2 test;
void main(void) {
gl_Position = vec4(0.0, 0.0, 0.0, 0.0);
test = vec2(1.0, 0.0);
}
Fragment Shader:
precision highp float;
varying vec2 test;
void main(void) {
gl_FragColor = vec4(test.xy, 0.0, 1.0);
}
Test code:
const canvas = document.createElement('canvas');
gl = canvas.getContext('webgl')
let vert = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vert, "varying vec2 test;\nvoid main(void) {\n gl_Position = vec4(0.0, 0.0, 0.0, 0.0);\n test = vec2(1.0, 0.0);\n}");
gl.compileShader(vert);
let frag = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(frag, "precision highp float;\nvarying vec2 test;\nvoid main() {\n\tgl_FragColor = vec4(test.xy, 0.0, 1.0);\n}");
gl.compileShader(frag);
let program = gl.createProgram();
gl.attachShader(program, vert);
gl.attachShader(program, frag);
gl.linkProgram(program);
gl.useProgram(program);
Just a guess, but I wonder if it's because you're not using textureCoordinates in your fragment shader. The names and types match just fine, so I don't think that's the issue. I've done the same thing here:
Frag:
// The fragment shader is the rasterization process of webgl
// use float precision for this shader
precision mediump float;
// the input texture coordinate values from the vertex shader
varying vec2 vTextureCoord;
// the texture data, this is bound via gl.bindTexture()
uniform sampler2D texture;
// the colour uniform
uniform vec3 color;
void main(void) {
// gl_FragColor is the output colour for a particular pixel.
// use the texture data, specifying the texture coordinate, and modify it by the colour value.
gl_FragColor = texture2D(texture, vec2(vTextureCoord.s, vTextureCoord.t)) * vec4(color, 1.0);
}
Vert:
// setup passable attributes for the vertex position & texture coordinates
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
// setup a uniform for our perspective * lookat * model view matrix
uniform mat4 uMatrix;
// setup an output variable for our texture coordinates
varying vec2 vTextureCoord;
void main() {
// take our final matrix to modify the vertex position to display the data on screen in a perspective way
// With shader code here, you can modify the look of an image in all sorts of ways
// the 4th value here is the w coordinate, and it is called Homogeneous coordinates, (x,y,z,w).
// It effectively allows the perspective math to work. With 3d graphics, it should be set to 1. Less than 1 will appear too big
// Greater than 1 will appear too small
gl_Position = uMatrix * vec4(aVertexPosition, 1);
vTextureCoord = aTextureCoord;
}
The issue was resolved by updating Chrome for OS X from v51.something to 52.0.2743.82 (64-bit). Weird.
I'm trying to render an image and offset it by using glTranslate:
glPushMatrix();
glTranslatef(x, y, 0.0f);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
glPopMatrix();
I'm also using a shader, and in the vertex shader I set the position of the vertices:
in vec2 position;
in vec3 color;
out vec3 Color;
void main() {
Color = color;
gl_Position = vec4(position, 0.0, 1.0);
}
However, this always renders the square at the same position. I'm thinking this is because the position vector is always the same. How can I use this shader but still be able to move the image around with glTranslate? I suspect I have to change my shader input, but how?
glTranslatef modifies the current fixed-function modelview matrix, which is made available to the vertex shader as part of the built-in MVP state. In GLSL versions before 150 there is a shortcut:
gl_Position = ftransform();
which applies the fixed-function transformation matrices to the input position as it was passed in with glVertex*.
However, GLSL 150 core doesn't allow using those built-in uniforms or that function. Instead, create a matrix uniform and pass it in yourself:
#version 150 core
in vec2 position;
in vec3 color;
out vec3 Color;
uniform mat4 mvp;
void main() {
Color = color;
gl_Position = mvp * vec4(position, 0.0, 1.0);
}
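On the application side, the matrix that glTranslatef used to build can be computed with GLM and uploaded to that uniform. A minimal sketch, assuming program is the linked shader program and x, y are the offsets from the original snippet:
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>

// equivalent of glTranslatef(x, y, 0.0f); multiply in projection/view matrices as needed
glm::mat4 mvp = glm::translate(glm::mat4(1.0f), glm::vec3(x, y, 0.0f));

glUseProgram(program);  // the program must be current before calling glUniform*
glUniformMatrix4fv(glGetUniformLocation(program, "mvp"), 1, GL_FALSE, glm::value_ptr(mvp));
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);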
I've set up an OpenGL environment with deferred shading following this tutorial, but I can't make the second shader output to my final buffer.
I can see that the first shader (the one that doesn't use lights) is working properly, because with gDEBugger I can see that the output buffers are correct, but the second shader can't display anything. I've also tried making the second shader output a single color for the whole scene just to see if it was displaying anything, but nothing is visible (the screen should be completely red but it isn't).
The first-pass shader (the one I use to create the buffers for the GBuffer) is working, so I'm not adding its code or the way I created and implemented my GBuffer, but if you need them I'll add them, just tell me.
I think the problem is in how I tell OpenGL to output to framebuffer 0 (my screen).
This is how I enable OpenGL to write to framebuffer 0:
glEnable(GL_BLEND);
m_MotoreGrafico->glBlendEquation(GL_FUNC_ADD);
glBlendFunc(GL_ONE, GL_ONE);
// Enable writing to the final buffer
m_MotoreGrafico->glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
m_gBuffer.BindForReading();
glClear(GL_COLOR_BUFFER_BIT);
// Set the shader matrices
SetUpOGLProjectionViewMatrix(1);
// Pass the GBuffer textures to the shader
pActiveShader->setUniform1i(_T("gPositionMap"), m_gBuffer.GetPositionTexture());
pActiveShader->setUniform1i(_T("gColorMap"), m_gBuffer.GetDiffuseTexture());
pActiveShader->setUniform1i(_T("gNormalMap"), m_gBuffer.GetNormalTexture());
// Pass the variables the shader needs
float dimensioneFinestra[2], posizioneCamera[3];
dimensioneFinestra[0] = m_nLarghezzaFinestra;
dimensioneFinestra[1] = m_nAltezzaFinestra;
m_MotoreGrafico->GetActiveCameraPosition(posizioneCamera);
pActiveShader->setUniform2f(_T("gScreenSize"), dimensioneFinestra);
pActiveShader->setUniform3f(_T("gCameraPos"), posizioneCamera);
pActiveShader->setUniform1i(_T("gUsaLuci"), 0);
// Draw the lights
float coloreLuce[3], posizioneLuce[3], direzioneLuce[3], vUpLuce[3], vRightLuce[3], intensita;
for(int i = 0; i < GetDocument()->m_RTL.GetNLights(); i++)
{
CRTLuce* pRTLuce = GetDocument()->m_RTL.GetRTLightAt(i);
...
m_MotoreGrafico->glBindVertexArray(pRTLuce->GetRTLuce()->GetVBO()->getVBAIndex());
glDrawArrays(GL_TRIANGLES, 0, pRTLuce->GetRTLuce()->GetNVertPerShader());
}
The function m_gBuffer.BindForReading() looks like this (but I don't think it matters for my problem):
for (unsigned int i = 0 ; i < ARRAY_SIZE_IN_ELEMENTS(m_textures); i++)
{
m_pMotoreGrafico->glActiveTexture(GL_TEXTURE0 + i);
glBindTexture(GL_TEXTURE_2D, m_textures[GBUFFER_TEXTURE_TYPE_POSITION + i]);
}
So far my GBuffer is working (it creates the textures) and my first shader is also working (it's drawing the textures of my GBuffer).
The problem, then, is that I can't get OpenGL to draw to my screen again.
The first four textures are the ones created with the first-pass shader.
This is my back buffer (after the second-pass shader), and this is my front buffer (after the second-pass shader).
This is my second-pass fragment shader code, reduced so that it outputs only red:
out vec4 outputColor;
void main()
{
outputColor = vec4(1.0, 0.0, 0.0, 1.0);
}
Does anyone have an idea of what I'm doing wrong?
Second-pass vertex shader code:
#version 330
uniform struct Matrici
{
mat4 projectionMatrix;
mat4 modelMatrix;
mat4 viewMatrix;
} matrices;
layout (location = 0) in vec3 inPosition;
void main()
{
vec4 vEyeSpacePosVertex = matrices.viewMatrix * matrices.modelMatrix * vec4(inPosition, 1.0);
gl_Position = matrices.projectionMatrix * vEyeSpacePosVertex;
}
Second-pass fragment shader code:
#version 330
uniform struct MDLight
{
vec3 vColor;
vec3 vPosition;
vec3 vDirection;
float fAmbientIntensity;
float fStrength;
int bOn;
float fConeCosine;
float fAltezza;
float fLarghezza;
vec3 vUp;
vec3 vRight;
} gLuce;
uniform float gSpecularIntensity;
uniform float gSpecularPower;
uniform sampler2D gPositionMap;
uniform sampler2D gColorMap;
uniform sampler2D gNormalMap;
uniform vec3 gCameraPos;
uniform vec2 gScreenSize;
uniform int gLightType;
uniform int gUsaLuci;
vec2 CalcTexCoord()
{
return gl_FragCoord.xy / gScreenSize;
}
out vec4 outputColor;
void main()
{
vec2 TexCoord = CalcTexCoord();
vec4 Color = texture(gColorMap, TexCoord);
outputColor = vec4(1.0, 0.0, 0.0, 1.0);
}
I can't seem to get my square into the correct viewing matrix in order to manipulate it using glm functions.
This is basically my main.cpp: it consists of init(), which loads a texture and the GLSL vert/frag files and constructs the square from the ground class, plus a reshape() function and a display() function that calls drawGround() to render the actual square.
Inside drawGround() I've added the model/view matrices and done a small translation, but it doesn't work. I've been playing with it for hours and can't seem to get it working.
void display()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
drawGround();
glUseProgram(0);
}
void drawGround(){
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(myShader.handle());
GLuint matLocation = glGetUniformLocation(myShader.handle(), "ProjectionMatrix");
glUniformMatrix4fv(matLocation, 1, GL_FALSE, &ProjectionMatrix[0][0]);
glm::mat4 viewingMatrix = glm::translate(glm::mat4(1.0),glm::vec3(0,0,-1));
ModelViewMatrix = glm::translate(viewingMatrix,glm::vec3(15.0,0.0,0));
glUniformMatrix4fv(glGetUniformLocation(myShader.handle(), "ModelViewMatrix"), 1, GL_FALSE, &ModelViewMatrix[0][0]);
ground.render(texName, &myShader);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
glUseProgram(0);
}
Yet in my other program I have the following function, renderSky(), which works just fine.
Please help me figure out where I'm going wrong...
If you need to see the Ground Class, let me know.
void renderSky(){
glUseProgram(mySkyShader.handle());
GLuint matLocation = glGetUniformLocation(mySkyShader.handle(), "ProjectionMatrix");
glUniformMatrix4fv(matLocation, 1, GL_FALSE, &ProjectionMatrix[0][0]);
glm::mat4 viewingMatrix = glm::translate(glm::mat4(1.0),glm::vec3(0,0,-1));
ModelViewMatrix = glm::translate(viewSkyMatrix,glm::vec3(15,0,0));
glUniformMatrix4fv(glGetUniformLocation(mySkyShader.handle(), "ModelViewMatrix"), 1, GL_FALSE, &ModelViewMatrix[0][0]);
skyBox.render();
glUseProgram(0);
}
This is the vertex shader:
#version 150
in vec3 in_Position;
in vec4 in_Color;
out vec4 ex_Color;
in vec2 in_TexCoord;
out vec2 ex_TexCoord;
void main(void)
{
gl_Position = vec4(in_Position, 1.0);
ex_Color = in_Color;
ex_TexCoord = in_TexCoord;
}
In order to get the object into the correct model/view space, the matrices have to be multiplied with in_Position in the vertex shader.
gl_Position = vec4(in_Position, 1.0); becomes this:
gl_Position = ProjectionMatrix * ModelViewMatrix * vec4(in_Position, 1.0);
Also, the Model and Projection matrices need to have uniforms added to the shader:
uniform mat4 ModelViewMatrix;
uniform mat4 ProjectionMatrix;
With this additional code the vertex shader works, and the matrices can be manipulated by any following glm functions.
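For reference, a matching application-side sketch of building and uploading the two matrices with GLM; the perspective parameters and the aspect variable are assumptions, while myShader.handle() and the translations come from the original drawGround():
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>

// assumed projection; the original code never shows how ProjectionMatrix is built
glm::mat4 ProjectionMatrix = glm::perspective(glm::radians(45.0f), aspect, 0.1f, 100.0f);
glm::mat4 viewingMatrix = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 0.0f, -1.0f));
glm::mat4 ModelViewMatrix = glm::translate(viewingMatrix, glm::vec3(15.0f, 0.0f, 0.0f));

glUseProgram(myShader.handle());
glUniformMatrix4fv(glGetUniformLocation(myShader.handle(), "ProjectionMatrix"), 1, GL_FALSE, glm::value_ptr(ProjectionMatrix));
glUniformMatrix4fv(glGetUniformLocation(myShader.handle(), "ModelViewMatrix"), 1, GL_FALSE, glm::value_ptr(ModelViewMatrix));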