Here is my code for generating the texture (MRE):
glGenTextures(1, &id);
glBindTexture(GL_TEXTURE_2D, id);
if(readAlpha)
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
else
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, data);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glGenerateMipmap(GL_TEXTURE_2D);
Here is how the texture coordinates are generated:
for (int y = 0; y < resolution; y++)
{
for (int x = 0; x < resolution; x++)
{
int i = x + y * resolution;
glm::vec2 percent = glm::vec2(x, y) / ((float)resolution - 1);
glm::vec3 pointOnPlane = (percent.x - .5f) * 2 * right + (percent.y - .5f) * 2 * front;
pointOnPlane *= scale;
vertices[i] = Vert();
vertices[i].position = glm::vec3(0.0f);
vertices[i].position.x = (float)pointOnPlane.x;
vertices[i].position.y = (float)pointOnPlane.y;
vertices[i].position.z = (float)pointOnPlane.z;
vertices[i].texCoord = glm::vec2(percent.x, percent.y)*textureScale;
vertices[i].normal = glm::vec3(0.0f);
if (x != resolution - 1 && y != resolution - 1)
{
inds[triIndex] = i;
inds[triIndex + 1] = i + resolution + 1;
inds[triIndex + 2] = i + resolution;
inds[triIndex + 3] = i;
inds[triIndex + 4] = i + 1;
inds[triIndex + 5] = i + resolution + 1;
triIndex += 6;
}
}
}
Here is the shader:
VERT:
#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 aNorm;
layout (location = 2) in vec2 aTexCoord;
uniform mat4 _PV;
uniform mat4 _Model;
out DATA
{
vec3 FragPos;
vec3 Normal;
vec2 TexCoord;
mat4 PV;
} data_out;
void main()
{
gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);
data_out.FragPos = aPos;
data_out.Normal = aNorm;
data_out.TexCoord = aTexCoord;
data_out.PV = _PV;
}
GEOM:
#version 330 core
layout(triangles) in;
layout(triangle_strip, max_vertices = 3) out;
out vec3 FragPos;
out vec3 Normal;
out vec2 TexCoord;
in DATA
{
vec3 FragPos;
vec3 Normal;
vec2 TexCoord;
mat4 PV;
} data_in[];
void main()
{
gl_Position = data_in[0].PV * gl_in[0].gl_Position;
Normal = data_in[0].Normal;
TexCoord = data_in[0].TexCoord;
FragPos = data_in[0].FragPos;
EmitVertex();
gl_Position = data_in[0].PV * gl_in[1].gl_Position;
Normal = data_in[1].Normal;
TexCoord = data_in[0].TexCoord;
FragPos = data_in[1].FragPos;
EmitVertex();
gl_Position = data_in[0].PV * gl_in[2].gl_Position;
Normal = data_in[2].Normal;
TexCoord = data_in[0].TexCoord;
FragPos = data_in[2].FragPos;
EmitVertex();
EndPrimitive();
}
FRAG:
#version 330 core
out vec4 FragColor;
uniform vec3 _LightPosition;
uniform vec3 _LightColor;
uniform sampler2D _Diffuse;
//unifrom float _UseTexutres;
in vec3 FragPos;
in vec3 Normal;
in vec2 TexCoord;
void main()
{
//vec3 objectColor = vec3(0.34f, 0.49f, 0.27f);
vec3 objectColor = vec3(1, 1, 1);
objectColor = texture(_Diffuse, TexCoord).xyz;
vec3 norm = normalize(Normal);
vec3 lightDir = normalize(_LightPosition - FragPos);
float diff = max(dot(norm, lightDir), 0.0f);
vec3 diffuse = diff * _LightColor;
vec3 result = (vec3(0.2, 0.2, 0.2) + diffuse) * objectColor;
FragColor = vec4(result, 1.0);
}
I am getting a pixelated texture even though I am using an 8K texture.
If you want to see the entire source: https://github.com/Jaysmito101/TerraGen3D
Here is the result:
Your geometry shader does not make sense:
First of all, you use the same data_in[0].TexCoord for all 3 vertices of the output triangle, which means that all fragments generated for this triangle sample the exact same location of the texture and therefore get the exact same output color; that is where the "pixelated" structure of the image comes from. As you already do for Normal and FragPos, you should forward the data for each vertex. This alone should solve your issue.
However, there are more issues with your approach. You forward mat4 PV as per-vertex data from the VS to the GS, but the data you forward is a uniform, so this is a waste of resources. Every shader stage has access to all of the uniforms, so there is no need to forward this data per vertex.
But the real elephant in the room is what this geometry shader is supposed to be doing at all. The actual transformation with the uniform matrices can, and absolutely should, be carried out directly in the vertex shader, and the rest of your geometry shader is basically an attempt at a pass-through implementation (just a faulty one). So what do you need this shader for? You can do the transformation in the VS and remove the geometry shader completely. Performance-wise this is also a win, as geometry shaders are rather inefficient and should be avoided when not absolutely needed.
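For illustration, here is a minimal sketch of a vertex shader that makes the geometry shader unnecessary (it assumes _Model is actually meant to be applied; in the posted code it is declared but never used):
#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 aNorm;
layout (location = 2) in vec2 aTexCoord;
uniform mat4 _PV;
uniform mat4 _Model;
out vec3 FragPos;
out vec3 Normal;
out vec2 TexCoord;
void main()
{
    // do the full transformation here and drop the geometry shader stage entirely
    vec4 worldPos = _Model * vec4(aPos, 1.0);
    FragPos  = worldPos.xyz;
    Normal   = mat3(_Model) * aNorm; // fine as long as _Model has no non-uniform scaling
    TexCoord = aTexCoord;            // forwarded per vertex, so it interpolates across the triangle
    gl_Position = _PV * worldPos;
}
The existing fragment shader can stay as it is, since it already expects FragPos, Normal and TexCoord as inputs.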
Related
I'm working on my own game engine and learning OpenGL in the process, and I have been stuck on shadow mapping for longer than I would like to admit.
I have been following this guide: https://learnopengl.com/Advanced-Lighting/Shadows/Shadow-Mapping but no shadows get drawn on my plane.
This is how the code looks in my project:
Framebuffer for creating the depth texture:
glCreateFramebuffers(1, &m_framebufferID);
glBindFramebuffer(GL_FRAMEBUFFER, m_framebufferID);
ui32 depthMapID;
glGenTextures(1, &depthMapID);
glBindTexture(GL_TEXTURE_2D, depthMapID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, 1024, 1024, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glBindFramebuffer(GL_FRAMEBUFFER, m_framebufferID);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, depthMapID, 0);
ASSERT(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE, "Framebuffer is incomplete!");
glBindFramebuffer(GL_FRAMEBUFFER, 0);
Depth texture shader (ignore the unused vertex layout):
#type vertex
#version 330 core
layout (location = 0) in vec3 vertex_position;
layout (location = 1) in vec3 vertex_color;
layout (location = 2) in vec3 vertex_normal;
layout (location = 3) in vec3 vertex_tangent;
layout (location = 4) in vec2 vertex_texcoord;
uniform mat4 u_lightViewProjectionMatrix;
uniform mat4 u_worldTransformMatrix;
void main()
{
gl_Position = u_lightViewProjectionMatrix * (u_worldTransformMatrix * vec4(vertex_position, 1.0));
}
#type fragment
#version 330 core
// Ouput data
//layout(location = 0) out float fragmentdepth;
void main(){
// Not really needed, OpenGL does it anyway
//fragmentdepth = gl_FragCoord.z;
}
This is how I create the shadow map
if (s_castingShadowMeshes.Size() == 0)
return;
float shadowDistance = 100.0f;
glm::mat4 lightView = glm::lookAt(LightManager::GetDirectionalLight().direction * -shadowDistance, glm::vec3(0.0f), MathUtils::Vector3UnitY);
glm::mat4 lightProjection = glm::ortho(-shadowDistance, shadowDistance, -shadowDistance, shadowDistance, 0.0f, shadowDistance * 2.0f);
s_lightViewProjection = lightProjection * lightView;
glBindFramebuffer(GL_FRAMEBUFFER, m_framebufferID);
glViewport(0, 0, 1024, 1024); //Shadow height and width is 1024
glDepthMask(GL_TRUE);
glClearDepth(1.0f);
glClearColor(color.r, color.g, color.b, color.a);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
for (ui32 i = 0; i < s_castingShadowMeshes.Size(); ++i)
{
s_depthMapShader->Bind();
s_depthMapShader->SetUniform(UNIFORM_LIGHT_SPACE_TRANSFORM, ShaderDataType::Mat4, &(s_lightViewProjection));
s_depthMapShader->SetUniform(UNIFORM_MODEL_SPACE_TRANSFORM, ShaderDataType::Mat4, &(s_castingShadowMeshes[i]->GetWorldTransform()[0][0]));
s_rendererPlatformInterface->DrawVertexArray(s_castingShadowMeshes[i]->GetVertexArray());
s_renderStats.drawCalls++;
s_depthMapShader->Unbind();
}
s_shadowFramebuffer->Unbind();
Up to this point, RenderDoc shows me that a depth texture is actually being generated:
Now, this is what rendering the plane looks like:
shader->Bind();
ui32 useShadowMapTex = 0;
if (receiveShadows)
{
useShadowMapTex = 1;
ui32 shadowMapSlot = (ui32)Material::TextureSlots::ShadowMap;
shader->SetUniform(UNIFORM_SHADOWMAP_TEX, ShaderDataType::Int, &shadowMapSlot);
s_shadowMapTex->Bind(shadowMapSlot);
}
shader->SetUniform(UNIFORM_USE_SHADOWMAP_TEX, ShaderDataType::Int, &useShadowMapTex);
shader->SetUniform(UNIFORM_MODEL_SPACE_TRANSFORM, ShaderDataType::Mat4, &(transform[0][0]));
shader->SetUniform(UNIFORM_VIEW_PROJECTION, ShaderDataType::Mat4, &(s_sceneData.viewProjectionMatrix));
shader->SetUniform(UNIFORM_CAMERA_POS, ShaderDataType::Float3, &(s_sceneData.cameraPosition));
shader->SetUniform(UNIFORM_DIR_LIGHT_DIRECTION, ShaderDataType::Float3, &(LightManager::GetDirectionalLight().direction));
shader->SetUniform(UNIFORM_DIR_LIGHT_AMBIENT,ShaderDataType::Float3, &(LightManager::GetDirectionalLight().ambientColor));
shader->SetUniform(UNIFORM_DIR_LIGHT_DIFUSSE, ShaderDataType::Float3, &(LightManager::GetDirectionalLight().diffuseColor));
shader->SetUniform(UNIFORM_DIR_LIGHT_SPECULAR, ShaderDataType::Float3, &(LightManager::GetDirectionalLight().specularColor));
s_rendererPlatformInterface->DrawVertexArray(vertexArray);
s_renderStats.drawCalls++;
s_renderStats.vertices += vertexArray->GetIndexBuffer()->GetCount();
The shader for the plane:
#type vertex
#version 330 core
layout (location = 0) in vec3 vertex_position;
layout (location = 1) in vec3 vertex_color;
layout (location = 2) in vec3 vertex_normal;
layout (location = 3) in vec3 vertex_tangent;
layout (location = 4) in vec2 vertex_texcoord;
out VS_OUT {
vec3 FragPos;
vec3 Normal;
vec2 TexCoords;
vec4 FragPosLightSpace;
} vs_out;
uniform mat4 u_viewProjectionMatrix;
uniform mat4 u_worldTransformMatrix;
uniform mat4 u_lightViewProjectionMatrix;
void main()
{
vs_out.FragPos = vec3(u_worldTransformMatrix * vec4(vertex_position, 1.0));
vs_out.Normal = transpose(inverse(mat3(u_worldTransformMatrix))) * vertex_normal;
vs_out.TexCoords = vertex_texcoord;
vs_out.FragPosLightSpace = u_lightViewProjectionMatrix * vec4(vs_out.FragPos, 1.0);
gl_Position = u_viewProjectionMatrix* vec4(vs_out.FragPos, 1.0);
}
#type fragment
#version 330 core
out vec4 FragColor;
in VS_OUT {
vec3 FragPos;
vec3 Normal;
vec2 TexCoords;
vec4 FragPosLightSpace;
} fs_in;
struct DirectionalLight
{
vec3 direction;
vec3 ambientColor;
vec3 diffuseColor;
vec3 specularColor;
};
uniform DirectionalLight u_directionalLight;
uniform sampler2D u_shadowMapTex;
uniform vec3 u_cameraPos;
float ShadowCalculation(vec4 fragPosLightSpace)
{
// perform perspective divide
vec3 projCoords = fragPosLightSpace.xyz / fragPosLightSpace.w;
// transform to [0,1] range
projCoords = projCoords * 0.5 + 0.5;
// get closest depth value from light's perspective (using [0,1] range fragPosLight as coords)
float closestDepth = texture(u_shadowMapTex, projCoords.xy).r;
// get depth of current fragment from light's perspective
float currentDepth = projCoords.z;
// check whether current frag pos is in shadow
float shadow = currentDepth > closestDepth ? 1.0 : 0.0;
return shadow;
}
void main()
{
vec3 normal = normalize(fs_in.Normal);
vec3 lightColor = vec3(1.0);
// ambient
vec3 ambient = u_directionalLight.ambientColor;
// diffuse
vec3 lightDir = normalize(u_directionalLight.direction);
float diff = max(dot(lightDir, normal), 0.0);
vec3 diffuse = diff * lightColor;
// specular
vec3 viewDir = normalize(u_cameraPos - fs_in.FragPos);
float spec = 0.0;
vec3 halfwayDir = normalize(lightDir + viewDir);
spec = pow(max(dot(normal, halfwayDir), 0.0), 64.0);
vec3 specular = spec * lightColor;
// calculate shadow
float shadow = ShadowCalculation(fs_in.FragPosLightSpace);
vec3 lighting = (ambient + (1.0 - shadow) * (diffuse + specular));
FragColor = vec4(lighting, 1.0);
}
This is what I get:
According to RenderDoc, the shadow map texture is actually being passed to the shader, but no shadow ever gets drawn on the red plane:
I hope somebody can help me. Many thanks in advance.
P.S.: this is my first post ever on Stack Overflow, excuse me if I am violating any rule.
In my shadow map implementation for directional lights I have a collection of items which I iterate twice: first to render their depth to a depth buffer, then again to render them normally with the generated shadow map. Each item has a bool property called castShadow, and if it is true the item is added to the depth pass. The problem is that if an item casts shadows and is rendered into the depth buffer, it cannot receive shadows at all; once I set castShadow to false, so it is no longer rendered into the depth buffer, it starts receiving shadows without problems.
So in the render function I first render the depth buffer as follows:
When the object casts shadows, it doesn't receive shadows
When the object doesn't cast shadows, it receives shadows
void Engine::renderDepthMaps() {
if (sun != nullptr) {
if (sun->castShadow) {
glViewport(0, 0, SHADOW_WIDTH, SHADOW_HEIGHT);
glBindFramebuffer(GL_FRAMEBUFFER, depthMapFBO);
glClear(GL_DEPTH_BUFFER_BIT);
glm::vec3 pos = -500.0f * sun->direction;
glm::mat4 lightSpaceProjection = glm::ortho(-10.0f, 10.0f, -10.0f, 10.0f, -10.0f, 1000.0f);
glm::mat4 lightSpaceView = glm::lookAt(pos, player->getPosition(), glm::vec3(0, 1, 0));
lightSpaceMatrix = lightSpaceProjection * lightSpaceView;
Shader& shader = shaders.getShader(DEPTH);
shader.use();
shaders.setDepthLightSpaceUniform(shader, lightSpaceMatrix);
for (item_it it = engineItems.begin(); it != engineItems.end(); ++it) {
if (it->get()->castShadow)
it->get()->renderDepth(deltaFirst);
}
glBindFramebuffer(GL_FRAMEBUFFER, 0);
}
}
}
and the render function is:
void Engine::renderMeshes() {
glViewport(0, 0, width, height);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
Shader& basicShader = shaders.getShader(BASIC);
basicShader.use();
sun->setUniforms(basicShader.getProgramID(), "directLights");
shaders.setLightSpaceUniform(basicShader, sun, lightSpaceMatrix, depthMap);
shaders.setShaderViewUniforms(basicShader, camera);
terrain->render(basicShader, deltaFirst);
for (item_it it = basicItems.begin(); it != basicItems.end(); ++it) {
if (it->get()->type != "terrain")
it->get()->render(basicShader, deltaFirst);
}
}
and the shaders are:
vertex:
#version 300 es
precision highp float;
layout (location = 0) in vec3 position;
layout (location = 1) in vec3 normal;
layout (location = 2) in vec2 texCoord;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
uniform mat4 lightSpaceMatrix;
uniform mat3 normalMatrix;
out vec4 FragPosLightSpace;
out vec2 TexCoords;
out vec3 Normal;
out vec3 viewPos;
out vec3 fragPos;
void main(){
TexCoords = texCoord;
Normal=normalMatrix*normal;
fragPos = vec3(model * vec4(position,1.0f));
FragPosLightSpace =lightSpaceMatrix * model * vec4(position,1.0f);
gl_Position = projection * view * model * vec4(position,1.0f);
}
fragment:
#version 300 es
precision highp float;
out vec4 glFragColor;
vec2 poissonDisk[4] = vec2[](
vec2( -0.94201624, -0.39906216 ),
vec2( 0.94558609, -0.76890725 ),
vec2( -0.094184101, -0.92938870 ),
vec2( 0.34495938, 0.29387760 )
);
struct DirectLight {
vec3 direction;
vec3 color;
float intensity;
};
in vec3 Normal;
in vec2 TexCoords;
in vec3 fragPos;
in vec4 FragPosLightSpace;
uniform int has_texture;
uniform vec3 matDiffuse;
uniform sampler2D shadowMap;
uniform sampler2D mat_diffuse;
uniform sampler2D mat_specular;
uniform vec3 matSpecular;
uniform float shininess;
uniform DirectLight sun;
uniform int castShadow;
uniform vec3 viewPos;
void main(){
vec3 tex=vec3(1),spe=vec3(1);
if(has_texture==1){
tex=vec3(texture(mat_diffuse, TexCoords));
spe=vec3(texture(mat_specular,TexCoords));
}
vec3 diffColor = matDiffuse * tex;
vec3 specColor = matSpecular * spe;
vec3 ambient = vec3(0.4,0.4,0.4)*diffColor;
vec3 lightDir = normalize(-sun.direction);
float diff = max(dot(Normal,lightDir), 0.0);
vec3 diffuse = sun.color * diff * diffColor;
vec3 viewDir = normalize(viewPos - fragPos);
vec3 reflectDir = reflect(-lightDir, Normal);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), shininess);
vec3 specular = spec * specColor * sun.color;
vec3 color = ambient + diffuse + specular;
float gamma = 2.2;
color.rgb = pow(color.rgb, vec3(1.0/gamma));
if(castShadow==1){
vec3 projCoords = FragPosLightSpace.xyz / FragPosLightSpace.w;
projCoords = projCoords * 0.5 + 0.5;
float shadow = 1.0;
for (int i=0;i<4;i++){
shadow -= 0.2*(1.0-texture( shadowMap, projCoords.xy + poissonDisk[i]/700.0).r);
}
if(projCoords.z > 1.0)
shadow = 0.0;
color *=shadow;
}
glFragColor=vec4(color,1.0f);
}
Another problem: the light-space projection matrix is glm::ortho(-10.0f, 10.0f, -10.0f, 10.0f, -10.0f, 1000.0f) and the shadow map size is 1024; when an object is outside that volume it creates a very long shadow that is not in the correct direction, as shown in the following image:
I am trying to implement shadow maps for point lights. Basically, I'm creating a framebuffer and then rendering all shadow casters to each side of a cubemap texture (six passes), then reading it in the regular rendering pass to determine which pixels are in shadow. I have several questions:
Why do I have to include a color attachment in addition to a depth component in order for anything to get rendered to my cubemap? I tried it without the color attachment and it did not work.
After adding the color attachment, I can see my shadow casters in the cubemap, but the shadow comparison seems wrong. I suspect one of the values is in NDC while the other isn't.
Here's how I initialize my framebuffer containing the shadow cubemap:
// Create the depth buffer
glGenTextures(1, &mDepthTextureID);
glBindTexture(GL_TEXTURE_2D, mDepthTextureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT32, width, height, 0, GL_DEPTH_COMPONENT, GL_FLOAT, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glBindTexture(GL_TEXTURE_2D, 0);
//Create the cubemap texture
glGenTextures(1, &mCubemapTextureID);
glBindTexture(GL_TEXTURE_CUBE_MAP, mCubemapTextureID);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
for (GLuint i = 0; i < 6; ++i)
{
glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, GL_R32F, width, height, 0, GL_RED, GL_FLOAT, 0);
}
glBindTexture(GL_TEXTURE_CUBE_MAP, 0);
//Create the framebuffer and attach the cubemap texture to it
glGenFramebuffers(1, &mFrameBufferObjectID);
glBindFramebuffer(GL_FRAMEBUFFER, mFrameBufferObjectID);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, mDepthTextureID, 0);
//Disable writes to the color buffer
glDrawBuffer(GL_NONE);
//Disable reads from the color buffer
glReadBuffer(GL_NONE);
GLenum Status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (Status != GL_FRAMEBUFFER_COMPLETE)
{
switch(Status)
{
case GL_FRAMEBUFFER_UNSUPPORTED:
printf("FrameBuffer unsupported error");
return false;
break;
case GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT:
printf("FrameBuffer incomplete attachement");
return false;
break;
default:
printf("GLShadowCubemap error, status: 0x%x\n", Status);
return false;
}
}
//Unbind this
glBindFramebuffer(GL_FRAMEBUFFER, 0);
Here's my shadow's vertex shader: (Only the Position attribute is used)
#version 330 core
layout (location = 0) in vec3 Position;
layout (location = 1) in vec3 Normal;
layout (location = 2) in vec2 TexCoord;
layout (location = 3) in vec3 Tangent;
uniform mat4 gModelMatrix;
uniform mat4 gModelViewProjectionMatrix;
out vec3 WorldPosition;
/*
* Below needs a GS and using layered rendering
void main()
{
gl_Position = gModelMatrix * vec4(Position, 1.0);
}
*/
void main()
{
vec4 pos4 = vec4(Position, 1.0);
gl_Position = gModelViewProjectionMatrix * pos4;
WorldPosition = (gModelMatrix * pos4).xyz;
}
Here's my shadow fragment shader:
#version 330 core
in vec3 WorldPosition;
uniform vec3 gLightPosition;
out float Fragment;
void main()
{
// get distance between fragment and light source
float dist_to_light = length(WorldPosition - gLightPosition);
//gl_FragDepth = dist_to_light;
Fragment = dist_to_light;
}
Additional question here:
I have seen many people say that overriding gl_FragDepth is a bad idea. I roughly know why, but what's strange here is that if I override gl_FragDepth manually, nothing gets written to the cubemap. Why?
Here's how I render all the regular stuff (the variable i is an index to my lights array)
mShadowCubemapFBOs[i].ViewportChange();
mShadowMapTechnique.SetLightPosition(light.Position);
const float shadow_aspect = (static_cast<float>(mShadowWidth) / mShadowHeight);
const mat4 shadow_projection_matrix = glm::perspective(90.f, shadow_aspect, 1.f, mShadowFarPlane);
const vector<MeshComponent>& meshes = ComponentManager::Instance().GetMeshComponentPool().GetPool();
for(int layer = 0; layer < 6; ++layer)
{
GLenum cubemap_face = GL_TEXTURE_CUBE_MAP_POSITIVE_X + layer;
mShadowCubemapFBOs[i].Bind(cubemap_face);
glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
for(const MeshComponent& mesh : meshes)
{
//the transform_component is referenced ahead of time.
const mat4 model_transform = transform_component->GetTransformMatrix();
mShadowMapTechnique.SetModelViewProjectionMatrix(light.Position, cubemap_face, shadow_projection_matrix, model_transform);
mShadowMapTechnique.SetModelMatrix(model_transform);
mesh.Render();
}
}
Finally here's the regular rendering shader:
#version 330 core
const int MAX_LIGHTS = 8;
const int LIGHT_TYPE_DIRECTIONAL = 0;
const int LIGHT_TYPE_POINT = 1;
const int LIGHT_TYPE_SPOT = 2;
in vec2 TexCoord0;
in vec3 WorldNormal0;
in vec3 WorldPos0;
in vec3 WorldTangent0;
out vec4 FragmentColor;
struct Material
{
vec4 Emissive;
vec4 Ambient;
vec4 Diffuse;
vec4 Specular;
float SpecularPower;
bool UseTexture;
};
struct Light
{
vec3 Position;
vec3 Direction;
vec4 Color; //RGBA
float SpotAngle;
float ConstantAttenuation;
float LinearAttenuation;
float QuadraticAttenuation;
int LightType;
samplerCube ShadowMap; //Cubemap shadows
bool Enabled;
};
struct LightingResult
{
vec4 Diffuse;
vec4 Specular;
};
uniform Material gMaterial;
uniform Light gLights[MAX_LIGHTS];
uniform sampler2D gTextureSampler0;
uniform sampler2D gNormalMap;
uniform bool gEnableNormalMap;
uniform vec3 gEyeWorldPos;
float CalculateShadowFactor(vec3 frag_pos, Light light)
{
vec3 fragment_to_light = frag_pos - light.Position;
float sample_distance = texture(light.ShadowMap, fragment_to_light).r;
float distance = length(fragment_to_light);
if (distance < sample_distance + 0.001)
{
return 1.0; // Inside the light
}
else
{
return 0.5; // Inside the shadow
}
}
//L - Light direction vector from pixel to light source
//N - Normal at the pixel
vec4 CalculateDiffuse(Light light, vec3 L, vec3 N)
{
float n_dot_l = max(0, dot(N, L));
return light.Color * n_dot_l;
}
//V - View vector
//L - Light direction vector from pixel to light source
//N - Normal at the pixel
vec4 CalculateSpecular(Light light, vec3 V, vec3 L, vec3 N)
{
//Phong lighting
vec3 R = normalize(reflect(-L, N));
float r_dot_v = max(0, dot(R, V));
return light.Color * pow(r_dot_v, max(0.4, gMaterial.SpecularPower));
}
float CalculateAttenuation(Light light, float distance)
{
return 1.0 / (light.ConstantAttenuation + light.LinearAttenuation * distance + light.QuadraticAttenuation * distance * distance);
}
//V - View vector
//P - Position of pixel
//N - Normal of pixel
LightingResult CalculatePointLight(Light light, vec3 V, vec3 P, vec3 N)
{
LightingResult result;
result.Diffuse = vec4(0.0, 0.0, 0.0, 1.0);
result.Specular = vec4(0.0, 0.0, 0.0, 1.0);
vec3 L = light.Position - P;
float distance = length(L);
L = normalize(L);
float attenuation = CalculateAttenuation( light, distance );
result.Diffuse = CalculateDiffuse(light, L, N) * attenuation;
result.Specular = CalculateSpecular(light, V, L, N) * attenuation;
return result;
}
//V - View vector
//P - Position of pixel
//N - Normal of pixel
LightingResult CalculateDirectionalLight(Light light, vec3 V, vec3 P, vec3 N)
{
LightingResult result;
result.Diffuse = vec4(0.0, 0.0, 0.0, 1.0);
result.Specular = vec4(0.0, 0.0, 0.0, 1.0);
vec3 L = -light.Direction;
result.Diffuse = CalculateDiffuse(light, L, N);
result.Specular = CalculateSpecular(light, V, L, N);
return result;
}
//L - Light vector
//Smoothness increases as angle gets larger
float CalculateSpotCone(Light light, vec3 L)
{
//cos are in radians
float min_cos = cos(light.SpotAngle);
float max_cos = (min_cos + 1.0f) / 2.0f;
float cos_angle = dot(light.Direction, -L); //negated L such that as we move towards the edge, intensity decreases
return smoothstep(min_cos, max_cos, cos_angle);
}
//V - View vector
//P - Position of pixel
//N - Normal of pixel
LightingResult CalculateSpotLight(Light light, vec3 V, vec3 P, vec3 N)
{
LightingResult result;
result.Diffuse = vec4(0.0, 0.0, 0.0, 1.0);
result.Specular = vec4(0.0, 0.0, 0.0, 1.0);
vec3 L = light.Position - P;
float distance = length(L);
L = normalize(L);
float attenuation = CalculateAttenuation(light, distance);
float spot_intensity = CalculateSpotCone(light, L);
result.Diffuse = CalculateDiffuse(light, L, N) * attenuation * spot_intensity;
result.Specular = CalculateSpecular(light, V, L, N) * attenuation * spot_intensity;
return result;
}
//P - Position of pixel
//N - Normal of pixel
LightingResult CalculateLighting(vec3 P, vec3 N)
{
vec3 V = normalize(gEyeWorldPos - P);
LightingResult total_result;
total_result.Diffuse = vec4(0, 0, 0, 1.0);
total_result.Specular = vec4(0, 0, 0, 1.0);
for(int i = 0; i < MAX_LIGHTS; ++i)
{
if(!gLights[i].Enabled)
{
continue;
}
LightingResult result;
result.Diffuse = vec4(0, 0, 0, 1.0);
result.Specular = vec4(0, 0, 0, 1.0);
float shadow_factor = 1.0;
switch(gLights[i].LightType)
{
case LIGHT_TYPE_DIRECTIONAL:
result = CalculateDirectionalLight(gLights[i], V, P, N);
break;
case LIGHT_TYPE_POINT:
result = CalculatePointLight(gLights[i], V, P, N);
shadow_factor = CalculateShadowFactor(P, gLights[i]);
break;
case LIGHT_TYPE_SPOT:
result = CalculateSpotLight(gLights[i], V, P, N);
shadow_factor = CalculateShadowFactor(P, gLights[i]);
break;
}
total_result.Diffuse += (result.Diffuse * shadow_factor);
total_result.Specular += (result.Specular * shadow_factor);
}
total_result.Diffuse = clamp(total_result.Diffuse, 0, 1);
total_result.Specular = clamp(total_result.Specular, 0, 1);
return total_result;
}
vec3 CalculateNormalMapNormal()
{
vec3 normal = normalize(WorldNormal0);
vec3 tangent = normalize(WorldTangent0);
tangent = normalize(tangent - dot(tangent, normal) * normal); //remove components from the normal vector. This is needed for non-uniform scaling
vec3 bi_tangent = cross(tangent, normal);
vec3 bump_map = texture(gNormalMap, TexCoord0).xyz;
bump_map = 2.0 * bump_map - vec3(1.0, 1.0, 1.0); //Remaps the values
mat3 TBN = mat3(tangent, bi_tangent, normal);
vec3 actual_normal = TBN * bump_map;
return normalize(actual_normal);
}
void main()
{
vec3 pixel_normal = normalize(WorldNormal0);
vec4 texture_color = vec4(0, 0, 0, 1);
if(gMaterial.UseTexture)
{
texture_color = texture( gTextureSampler0, TexCoord0 );
}
if(gEnableNormalMap)
{
pixel_normal = CalculateNormalMapNormal();
}
LightingResult light_result = CalculateLighting(WorldPos0, pixel_normal);
vec4 diffuse_color = gMaterial.Diffuse * light_result.Diffuse;
vec4 specular_color = gMaterial.Specular * light_result.Specular;
FragmentColor = (gMaterial.Emissive + gMaterial.Ambient + diffuse_color + specular_color) * texture_color;
//FragmentColor = texture_color;
//temp test
//vec3 fragment_to_light = WorldPos0 - gLights[1].Position;
//FragmentColor = vec4(vec3(texture(gLights[1].ShadowMap, fragment_to_light).r / gFarPlane), 1.0);
}
What am I doing wrong? I am storing the distance from the fragment to the light in world space, and it is written to a color buffer (not the depth buffer), so it shouldn't be in NDC. And when I compare it, the compared value is also in world space. Why are the shadows off? It appears as if the shadows are way larger than they should be, so the entire scene is covered in shadow, while the area that should actually be shadowed is lit.
Picture of the shadow cubemap:
Picture of the scene (only the helicopter will cast shadow):
Thanks!
After some debugging, I found my problems:
glm::perspective takes the FOV in radians, not degrees, even though its documentation says it is only in radians if GLM_FORCE_RADIANS is defined (which I did not define).
The shadow cubemap requires the clear color to be (FLT_MAX, FLT_MAX, FLT_MAX, 1.0) so that everything is NOT in shadow by default.
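For reference, a rough sketch of those two fixes in code (variable names taken from the snippets above; this is meant to slot into the existing render loop, not to be a complete program):
#include <cfloat> // FLT_MAX
#include <glm/gtc/matrix_transform.hpp>

// 1) glm::perspective expects the field of view in radians
const mat4 shadow_projection_matrix = glm::perspective(glm::radians(90.f), shadow_aspect, 1.f, mShadowFarPlane);

// 2) clear each cubemap face to "infinitely far" before rendering the shadow casters,
//    so texels that nothing was rendered into never compare as "in shadow"
glClearColor(FLT_MAX, FLT_MAX, FLT_MAX, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);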
See EDIT since the first part of the problem is solved.
I am trying to replicate the shadow mapping demo from http://learnopengl.com/#!Advanced-Lighting/Shadows/Shadow-Mapping with my own framework, but interestingly I do not get any shadows. The first significant problem is that my depth map is not working correctly. I have debugged and double-checked each line without success. Maybe another pair of eyes will have more success.
See (top left, 5th row - the image is completely white):
I will only write about the first render pass, since it seems that it is the one not working. By the way, the objects are centered at 0, 0, 0. The following code is used for the first render pass:
/// 1. render target is the depth map
glViewport(0, 0, SHADOW_MAP_WIDTH_u32, SHADOW_MAP_HEIGHT_u32);
m_frameBufferObject.bind(); // set the depth map as render target
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
/// place the camera where the light is positioned and render the scene
math::Matrix4D l_lightViewMatrix = math::Matrix4D::lookAt(m_light_p->getPosition(), math::Vector3D(0, 0, 0), math::Vector3D(0, 1, 0));
const math::Matrix4D& l_orthographicLightMatrix_r = m_light_p->getShadowInformation().getProjectionMatrix();
math::Matrix4D lightSpaceMatrix = l_orthographicLightMatrix_r * l_lightViewMatrix;
m_depthMapShader_p->bind();
m_depthMapShader_p->setUniformMat4("lightSpaceMatrix", lightSpaceMatrix);
renderNodes();
m_depthMapShader_p->printShaderInfoLog();
m_depthMapShader_p->unbind();
m_frameBufferObject.unbind();
I have tested that the view matrix and projection matrix generation delivers exactly the same results as GLM (math library for OpenGL). However, my orthographic matrix is defined by:
left = -10.0f
right = 10.0f
bottom = -10.0f
top = 10.0f
near = -1.0f
far = 7.5f
The initialization of the framebuffer object and the texture is as follows:
// - Create depth texture
glGenTextures(1, &m_shadowTextureBuffer_u32);
glBindTexture(GL_TEXTURE_2D, m_shadowTextureBuffer_u32);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, SHADOW_MAP_WIDTH_u32, SHADOW_MAP_HEIGHT_u32, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
m_frameBufferObject.bind();
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, m_shadowTextureBuffer_u32, 0);
glDrawBuffer(GL_NONE);
glReadBuffer(GL_NONE);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
{
fprintf(stderr, "Error on building shadow framebuffer\n");
exit(EXIT_FAILURE);
}
m_frameBufferObject.unbind();
The fragment and the vertex shader look like this:
#version 430
// Fragment shader for rendering the depth values to a texture.
out vec4 gl_FragColor;
void main()
{
gl_FragColor = vec4 (gl_FragCoord.z);
}
#version 430
// Vertex shader for rendering the depth values to a texture.
in layout (location = 0) vec3 position;
in layout (location = 1) vec4 color;
in layout (location = 2) vec3 normal;
in layout (location = 3) vec2 uv;
in layout (location = 4) vec3 tangent;
in layout (location = 5) int materialId;
uniform mat4 pr_matrix;
uniform mat4 vw_matrix;
uniform mat4 ml_matrix;
uniform mat4 lightSpaceMatrix;
void main()
{
gl_Position = lightSpaceMatrix * ml_matrix * vec4(position, 1.0);
}
EDIT:
After some sleep, I have found a little error in my renderer and the shader draws a "nice" depth map.
However, it looks like the texture mapping (depth comparison) is in the same coordinate system.
But the second rendering step is still not correct:
The vertex and the fragment shader for the second render pass look like this:
#version 430
in layout (location = 0) vec3 position;
in layout (location = 1) vec4 color;
in layout (location = 2) vec3 normal;
in layout (location = 3) vec2 uv;
in layout (location = 4) vec3 tangent;
in layout (location = 5) int materialId;
uniform mat4 pr_matrix = mat4(1.0);
uniform mat4 vw_matrix = mat4(1.0);
uniform mat4 ml_matrix = mat4(1.0);
uniform mat4 lightSpaceMatrix = mat4(1.0);
out VS_OUT
{
vec4 color;
vec2 texture_coordinates;
vec3 normal;
vec3 tangent;
vec3 binormal;
vec3 worldPos;
vec4 shadowProj;
flat int materialIdOut;
} vs_out;
void main()
{
vs_out.color = color;
vs_out.texture_coordinates = uv;
mat3 normalMatrix = transpose ( inverse ( mat3 ( ml_matrix )));
vs_out.normal = normalize ( normalMatrix * normalize ( normal ));
vs_out.tangent = normalize ( normalMatrix * normalize ( tangent ));
vs_out.binormal = normalize ( normalMatrix * normalize ( cross (normal , tangent )));
vs_out.worldPos = ( ml_matrix * vec4 ( position, 1)).xyz;
vs_out.materialIdOut = materialId;
vs_out.shadowProj = ( lightSpaceMatrix * ml_matrix * vec4 (position, 1.0) );
gl_Position = ( pr_matrix * vw_matrix * ml_matrix ) * vec4 (position, 1.0);
}
and
#version 430
#define MAX_NUM_TEXTURES 5
#define MAX_NUM_MATERIALS 12
struct SMaterial
{
vec3 m_ambient_v3;
vec3 m_diffuse_v3;
vec3 m_specular_v3;
float m_shininess_f32;
int m_textureIds[MAX_NUM_TEXTURES];
};
in VS_OUT
{
vec4 color;
vec2 texture_coordinates;
vec3 normal;
vec3 tangent;
vec3 binormal;
vec3 worldPos;
vec4 shadowProj;
flat int materialIdOut;
} fs_in;
uniform vec3 cameraPos;
uniform mat4 ml_matrix;
uniform mat4 vw_matrix;
uniform sampler2D texSlots[32];
uniform SMaterial material[MAX_NUM_MATERIALS];
uniform SLight light;
out vec4 gl_FragColor;
float shadowCalculation(vec4 fragPosLightSpace)
{
// perform perspective divide
vec3 projCoords = fragPosLightSpace.xyz / fragPosLightSpace.w;
// Transform to [0,1] range
projCoords = projCoords * vec3(0.5) + vec3(0.5);
// Get closest depth value from light's perspective (using [0,1] range fragPosLight as coords)
float closestDepth = texture(texSlots[31], projCoords.xy).r;
// Get depth of current fragment from light's perspective
float currentDepth = projCoords.z;
// Check whether current frag pos is in shadow
float shadow = currentDepth > closestDepth ? 1.0 : 0.0;
return shadow;
}
void main()
{
if ( (fs_in.materialIdOut >= 0) && (fs_in.materialIdOut < MAX_NUM_MATERIALS) )
{
int ambientTextureId = material[fs_in.materialIdOut].m_textureIds[0];
int diffuseTextureId = material[fs_in.materialIdOut].m_textureIds[1];
int specularTextureId = material[fs_in.materialIdOut].m_textureIds[2];
int alphaTextureId = material[fs_in.materialIdOut].m_textureIds[3];
int bumpTextureId = material[fs_in.materialIdOut].m_textureIds[4];
vec3 diffTexColor = vec3(0.6,0.6,0.6);
if ((diffuseTextureId >= 0) && (32 > diffuseTextureId))
{
diffTexColor = texture (texSlots[diffuseTextureId], fs_in.texture_coordinates).rgb;
}
// Calculate shadow
float shadow = 1.0 - shadowCalculation(fs_in.shadowProj);
gl_FragColor = vec4(diffTexColor, 1.0) * vec4(shadow, shadow, shadow, 1.0);
}
else
{
gl_FragColor = vec4(fs_in.normal,1.0);
}
}
In my experience, a depth map is pretty much always completely white, because a distance of more than 1 away from the light already makes that pixel white. If your whole scene is further than 1 unit away, then the whole map is white.
To render the map the way they show it in the tutorial, you either need your scene to be really small or you need to perform an operation on your depth map. I always like to check my maps by dividing their depth values by the camera's zFar distance. Try to find the value that gives the best contrast.
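If it helps, here is a rough sketch of a debug fragment shader (along the lines of the learnopengl.com debug quad) that remaps the sampled depth into a range with visible contrast; the names depthMap, near_plane and far_plane are placeholders you would have to wire up yourself:
#version 330 core
out vec4 FragColor;
in vec2 TexCoords;
uniform sampler2D depthMap; // the shadow map to inspect
uniform float near_plane;   // light frustum near plane
uniform float far_plane;    // light frustum far plane

// converts a non-linear depth buffer value back to linear eye-space depth
// (only needed for perspective light projections)
float LinearizeDepth(float depth)
{
    float z = depth * 2.0 - 1.0; // back to NDC
    return (2.0 * near_plane * far_plane) / (far_plane + near_plane - z * (far_plane - near_plane));
}

void main()
{
    float depthValue = texture(depthMap, TexCoords).r;
    // dividing by far_plane brings the values into a range where contrast becomes visible
    FragColor = vec4(vec3(LinearizeDepth(depthValue) / far_plane), 1.0);
    // for an orthographic light projection the raw value can be displayed directly:
    // FragColor = vec4(vec3(depthValue), 1.0);
}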
I have a vertex shader
#version 330 core
layout(location = 0) in vec3 VertexPosition;
layout(location = 1) in vec2 VertexUV;
layout(location = 2) in vec3 VertexNormal;
out VS_GS_VERTEX
{
vec2 UV;
vec3 vs_worldpos;
vec3 vs_normal;
} vertex_out;
uniform mat4 proj_matrix;
uniform mat4 model_matrix;
void main(void)
{
gl_Normal = VertexNormal;
gl_Position = proj_matrix * vec4(VertexPosition, 1.0);
vertex_out.UV = VertexUV; //VertexPosition.xy;
vertex_out.vs_worldpos = gl_Position.xyz;
vertex_out.vs_normal = mat3(model_matrix) * gl_Normal;
}
and a fragment shader
#version 330 core
in GS_FS_VERTEX
{
vec2 UV;
vec3 vs_worldpos;
vec3 vs_normal;
} vertex_in;
// Values that stay constant for the whole mesh.
uniform sampler2D sampler0;
uniform sampler2D sampler1;
uniform sampler2D sampler2;
uniform sampler2D sampler3;
//uniform sampler2D alphamap0;
uniform sampler2D alphamap1;
uniform sampler2D alphamap2;
uniform sampler2D alphamap3;
uniform int tex_count;
uniform vec4 color_ambient = vec4(0.75, 0.75, 0.75, 1.0);
uniform vec4 color_diffuse = vec4(0.25, 0.25, 0.25, 1.0);
//uniform vec4 color_specular = vec4(1.0, 1.0, 1.0, 1.0);
uniform vec4 color_specular = vec4(0.1, 0.1, 0.1, 0.25);
uniform float shininess = 5.0f;
uniform vec3 light_position = vec3(12.0f, 32.0f, 560.0f);
void main(){
vec3 light_direction = normalize(light_position - vertex_in.vs_worldpos);
vec3 normal = normalize(vertex_in.vs_normal);
vec3 half_vector = normalize(light_direction + normalize(vertex_in.vs_worldpos));
float diffuse = max(0.0, dot(normal, light_direction));
float specular = pow(max(0.0, dot(vertex_in.vs_normal, half_vector)), shininess);
gl_FragColor = texture( sampler0, vertex_in.UV ) * color_ambient + diffuse * color_diffuse + specular * color_specular;
// http://www.opengl.org/wiki/Texture_Combiners
// GL_MODULATE = *
// GL_INTERPOLATE Blend tex0 and tex1 based on a blending factor = mix(texel0, texel1, BlendFactor)
// GL_INTERPOLATE Blend tex0 and tex1 based on alpha of tex0 = mix(texel0, texel1, texel0.a)
// GL_ADD = clamp(texel0 + texel1, 0.0, 1.0)
if (tex_count > 0){
vec4 temp = texture( sampler1, vertex_in.UV );
vec4 amap = texture( alphamap1, vertex_in.UV);
gl_FragColor = mix(gl_FragColor, temp, amap.a);
}
if (tex_count > 1){
vec4 temp = texture( sampler2, vertex_in.UV );
vec4 amap = texture( alphamap2, vertex_in.UV);
gl_FragColor = mix(gl_FragColor, temp, amap.a);
}
if (tex_count > 2){
vec4 temp = texture( sampler3, vertex_in.UV );
vec4 amap = texture( alphamap3, vertex_in.UV);
gl_FragColor = mix(gl_FragColor, temp, amap.a);
}
}
It takes an indexed GL_TRIANGLE_STRIP as input:
glBindBuffer(GL_ARRAY_BUFFER, tMt.vertex_buf_id[cx, cy]);
glVertexAttribPointer(VERTEX_LAYOUT_POSITION, 3, GL_FLOAT, false, 0, pointer(0));
glEnableVertexAttribArray(0);
{ chunk tex position }
glBindBuffer(GL_ARRAY_BUFFER, chunkTexPositionBO);
glVertexAttribPointer(VERTEX_LAYOUT_TEX_UV, 2, GL_FLOAT, false, 0, pointer(0));
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, tMt.normal_buf_id[cx, cy]);
glVertexAttribPointer(VERTEX_LAYOUT_NORMAL, 3, GL_FLOAT, true, 0, pointer(0));
glEnableVertexAttribArray(2);
{ index buffer }
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, chunkIndexBO);
for i := 0 to tMt.texCount - 1 do begin
bt := tMt.texture_buf_id[cx, cy][i];
if bt = nil then
break;
glUniform1i(proj_tex_count_loc, i);
glActiveTexture(GL_TEXTURE0 + i);
glBindTexture(GL_TEXTURE_2D, bt.id);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
if i > 0 then begin
// this time, use blending:
glActiveTexture(GL_TEXTURE4 + 1);
glBindTexture(GL_TEXTURE_2D, tMt.alphamaps[cx, cy][i - 1]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
end;
end;
glDrawElements(GL_TRIANGLE_STRIP, length(chunkIndexArr), GL_UNSIGNED_SHORT, nil);
The code works as intended, except I'm not sure my normals are arranged properly: they were stored as bytes (converted to GLfloat as b / 0xFF), the xyz coordinates were reordered, and some probably need negation.
Can someone show me a geometry shader that displays the normals as lines, as shown at http://blogs.agi.com/insight3d/index.php/2008/10/23/geometry-shader-for-debugging-normals/? (Those shaders do not work at all, and it seems the out/in data gets lost between the vertex and fragment shaders.)
P.S. I'm not sure I did everything properly (I'm just starting with OpenGL and GLSL), so any suggestions are also appreciated.
Edit:
I made a simple geometry shader from examples
// This is a very simple pass-through geometry shader
#version 330 core
layout (triangles) in;
layout (triangle_strip, max_vertices = 145) out;
in VS_GS_VERTEX
{
vec2 UV;
vec3 vs_worldpos;
vec3 vs_normal;
} vertex_in[];
out GS_FS_VERTEX
{
vec2 UV;
vec3 vs_worldpos;
vec3 vs_normal;
} vertex_out;
uniform float uNormalsLength = 0.5;
void main()
{
int i;
// Loop over the input vertices
for (i = 0; i < gl_in.length(); i++)
{
vertex_out.UV = vertex_in[i].UV;
vertex_out.vs_worldpos = vertex_in[i].vs_worldpos;
vertex_out.vs_normal = vertex_in[i].vs_normal;
// Copy the input position to the output
gl_Position = gl_PositionIn[i];
EmitVertex();
gl_Position = gl_ModelViewProjectionMatrix * (gl_PositionIn[i] + (vec4(vertex_in[i].vs_normal, 0) * uNormalsLength));
gl_FrontColor = vec4(0.0, 0.0, 0.0, 1.0); //gl_FrontColorIn[i];
EmitVertex();
}
// End the primitive. This is not strictly necessary
// and is only here for illustrative purposes.
EndPrimitive();
}
but I don't know where it takes gl_ModelViewProjectionMatrix from (it seems deprecated) and the result looks awful; it seems everything, including the normals, is stripped. The picture is in glPolygonMode(GL_FRONT, GL_LINE) mode; the textures are also trying to map onto those lines.
As it seems, you're doing it all in a single pass and you actually emit 6 vertices per incoming triangle. This is not what you want.
Either do it in two passes, i.e. one pass for the mesh, the other for the normals, or try to emit the original triangle and a degenerate triangle for the normal. For simplicity I'd go for the two-pass version:
Inside your render loop:
render terrain
if and only if debug geometry is to be rendered:
    enable your debug normals shader
    render the terrain mesh a second time, passing POINTS to the vertex shader
To make this work, you'll need a second program object that is set up like in the blog post you linked to, consisting of a simple pass-through vertex shader, the following geometry shader, and a fragment shader for coloring the lines representing the normals.
The vertex and fragment shaders should be no problem. Assuming you have a smoothed mesh, i.e. you have actual, averaged vertex normals, you can simply pass in points and emit lines.
#version 330 core
// assuming you have vertex normals, you need to render a vertex
// only a single time. with any other prim type, you may render
// the same normal multiple times
layout (points) in;
// Geometry shaders can only output points, line strips or triangle
// strips by definition. you output a single line per vertex. therefore,
// the maximum number of vertices per line_strip is 2. This is effectively
// the same as rendering distinct line segments.
layout (line_strip, max_vertices = 2) out;
in vec3 vs_normal[];
uniform float normal_scale = 0.5; // don't forget: this is the default value!
/* if you're never going to change the normal_scale, consider simply putting a
constant there instead:
const float normal_scale = 0.5;
*/
void main()
{
// we simply transform and emit the incoming vertex - this is v0 of our
// line segment
vec4 v0 = gl_in[0].gl_Position;
gl_Position = gl_ModelViewProjectionMatrix * v0;
EmitVertex();
// we calculate v1 of our line segment
vec4 v1 = v0 + vec4(vs_normal[0] * normal_scale, 0);
gl_Position = gl_ModelViewProjectionMatrix * v1;
EmitVertex();
EndPrimitive();
}
Warning: Untested code!
This is probably as simple as it gets. Add a uniform to your fragment shader so you can color your normals as you like or simply export a constant color.
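A minimal fragment shader for the debug-normals program could look like this (the uniform name line_color is just an example):
#version 330 core
out vec4 FragColor;
uniform vec4 line_color = vec4(1.0, 1.0, 0.0, 1.0); // default: yellow lines
void main()
{
    FragColor = line_color;
}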
Note: This code still uses gl_ModelViewProjectionMatrix. If you're writing GL core code, please consider replacing legacy GL constructs, like the matrix stack, with your own stuff!
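As a sketch (untested), here is the same geometry shader with the legacy matrix replaced by a user-supplied uniform; the name u_modelViewProjection is arbitrary and has to be set from your application:
#version 330 core
layout (points) in;
layout (line_strip, max_vertices = 2) out;
in vec3 vs_normal[];
uniform mat4 u_modelViewProjection; // projection * view * model, computed on the CPU
uniform float normal_scale = 0.5;
void main()
{
    vec4 v0 = gl_in[0].gl_Position;
    gl_Position = u_modelViewProjection * v0;
    EmitVertex();
    vec4 v1 = v0 + vec4(vs_normal[0] * normal_scale, 0.0);
    gl_Position = u_modelViewProjection * v1;
    EmitVertex();
    EndPrimitive();
}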
Note 2: Your geometry shader is not what is usually referred to as a pass-through shader. First, you do processing on the incoming data that goes beyond just assigning incoming values to outgoing values. Second, how could it be a pass-through shader if you generate geometry? Pass-through means you don't do anything other than pass incoming values on to the next shader stage.