Scale 2D texture to model scaling to prevent stretching - C++

I have an OpenGL 3.3 program which has different objects in it, for example a simple cube. The cube's dimensions are 1x1x1 (vertices from -0.5, -0.5, -0.5 to 0.5, 0.5, 0.5) and it is textured with one 2D texture on each side. The texture is repeatable (seamless).
With my current code the model looks like this (ignore the actual texture):
And after scaling, like this:
In this case the texture should keep its original size in the z-direction but repeat along the z-axis.
Is there a good way to scale the texture properly to the model's scaling to prevent it from stretching? Or do I have to create a 3D texture?
The problem I found is that in my shader I only get the (scaled) position of the cube, for example -0.5, -1.5, -0.5, but the texture coordinates are only 2D (e.g. 0.0, 0.0), and I don't know along which axis I have to scale them since I don't know which side of the cube is currently being rendered.
For the sake of completeness, here is the vertex shader code:
#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 aNormal;
layout (location = 2) in vec2 aTexCoord;

out vec2 TexCoord;
out vec3 FragPos;
out vec3 Normal;

uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;

void main()
{
    FragPos = vec3(model * vec4(aPos, 1.0));
    Normal = mat3(transpose(inverse(model))) * aNormal;
    TexCoord = aTexCoord;
    gl_Position = projection * view * model * vec4(aPos, 1.0);
}
The fragment shader looks like this:
#version 330 core
out vec4 FragColor;

in vec2 TexCoord;

// texture samplers
uniform sampler2D texture_diffuse1;
uniform vec4 color;

void main()
{
    FragColor = color + texture(texture_diffuse1, TexCoord);
}
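One common approach (a sketch, not from the original post): instead of the mesh UVs, derive the texture coordinates from the scaled world-space position, FragPos, and pick the projection plane from the dominant axis of the interpolated Normal, triplanar style. With GL_REPEAT wrapping, one texture repeat then always covers one world-space unit, no matter how the model is scaled. FragPos and Normal would have to be passed into the fragment shader as well:

// Hypothetical fragment-shader sketch: world-space planar mapping.
in vec3 FragPos;   // scaled world-space position from the vertex shader
in vec3 Normal;

vec2 worldUV()
{
    vec3 n = abs(normalize(Normal));
    if (n.x >= n.y && n.x >= n.z)
        return FragPos.zy;   // face points along x: project onto the z/y plane
    else if (n.y >= n.z)
        return FragPos.xz;   // face points along y: project onto the x/z plane
    else
        return FragPos.xy;   // face points along z: project onto the x/y plane
}

void main()
{
    // The sampler's wrap mode must be GL_REPEAT for the tiling to show.
    FragColor = color + texture(texture_diffuse1, worldUV());
}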

Related

Color only the face red that is in front of camera?

I am pretty new to GLSL and I have been struggling to find a way to color the face of my rotating cube red. I have managed to draw red on one of the faces by using the normal's x component, but my objective is to make the cube draw whichever face is facing the camera red.
Fragment Shader
#version 330

in vec3 normal;
out vec4 fragColor;

void main() {
    vec3 nor = normalize(normal);
    fragColor = vec4(nor.x, 0.0, 0.0, 1.0);
}
Vertex Shader
#version 330

uniform mat4 u_m_matrix;
uniform mat4 u_vp_matrix;

layout (location = 0) in vec3 a_position;
layout (location = 1) in vec3 a_normal;

out vec3 normal;

void main()
{
    normal = a_normal;
    gl_Position = u_vp_matrix * u_m_matrix * vec4(a_position, 1.0);
}
I tried messing with the dot product of the normals and the direction the camera is looking at (0, 0, 1), but I have not achieved anything yet.
This would be the desired effect:
I do in fact think it involves some mathematics (a dot product), maybe getting the cosine of the angle and checking whether the vector is completely perpendicular or not, and depending on that drawing the face red or black?
In view space, the z-axis points out of the viewport. When a side of the cube faces the camera, the normal vector of that side in view space is (0, 0, 1). The red color can be taken from the z component of the normal vector.
But the normal vector has to be transformed from model space to view space (in the vertex shader). For that you have to know the view matrix:
mat3 normalMat = inverse(transpose(mat3(u_v_matrix * u_m_matrix)));
normal = normalMat * a_normal;
In the fragment shader, the red color channel can be taken from the z component:
vec3 nor = normalize(normal);
fragColor = vec4(nor.z, 0.0, 0.0, 1.0);
You can approximate a normal vector in normalized device space by transforming with mat3(u_vp_matrix * u_m_matrix). That's inaccurate, but it tints the faces dependent on their orientation, too. In normalized device space, the z-axis points into the viewport, e.g.:
Vertex shader:
#version 330

uniform mat4 u_m_matrix;
uniform mat4 u_vp_matrix;

layout (location = 0) in vec3 a_position;
layout (location = 1) in vec3 a_normal;

out vec3 normal;

void main()
{
    normal = mat3(u_vp_matrix * u_m_matrix) * a_normal;
    gl_Position = u_vp_matrix * u_m_matrix * vec4(a_position, 1.0);
}
Fragment shader:
#version 330

in vec3 normal;
out vec4 fragColor;

void main() {
    vec3 nor = normalize(normal);
    fragColor = vec4(-nor.z, 0.0, 0.0, 1.0);
}
If you just want to color the face that faces the camera, then you have to compare the cosine of the angle between the normal vector of the face and the view-space z-axis with the cosine of 45°. step compares a value to an edge and returns 0.0 or 1.0, depending on the result:
Vertex shader:
mat3 normalMat = inverse(transpose(mat3(u_v_matrix * u_m_matrix)));
normal = normalMat * a_normal;
Fragment shader:
vec3 nor = normalize(normal);
//float red = step(0.707, abs(dot(nor, vec3(0.0, 0.0, 1.0))));
float red = step(0.707, dot(nor, vec3(0.0, 0.0, 1.0)));
fragColor = vec4(red, 0.0, 0.0, 1.0);

How to make a retro/neon/glow effect using shaders?

Let's say the concept is to create a map consisting of cubes with a neon aesthetic, such as:
Currently I have this vertex shader:
// Uniforms
uniform mat4 u_projection;
uniform mat4 u_view;
uniform mat4 u_model;

// Vertex attributes
in vec3 a_position;
in vec3 a_normal;
in vec2 a_texture;

vec3 u_light_direction = vec3(1.0, 2.0, 3.0);

// Vertex shader outputs
out vec2 v_texture;
out float v_intensity;

void main()
{
    vec3 normal = normalize((u_model * vec4(a_normal, 0.0)).xyz);
    vec3 light_dir = normalize(u_light_direction);
    v_intensity = max(0.0, dot(normal, light_dir));
    v_texture = a_texture;
    gl_Position = u_projection * u_view * u_model * vec4(a_position, 1.0);
}
And this pixel shader:
in float v_intensity;
in vec2 v_texture;

uniform sampler2D u_texture;
out vec4 fragColor;

void main()
{
    fragColor = texture(u_texture, v_texture) * vec4(v_intensity, v_intensity, v_intensity, 1.0);
}
How would I use this to create a neon effect such as in the example for 3D cubes? The cubes are simply models with a mesh/material. The only change would be to set the material color to black and the outlines to a bright pink or blue (maybe with a glow).
Any help is appreciated. :)
You'd normally implement this as a post-processing effect: first render with bright, saturated colors into a texture, then apply a bloom effect when drawing that texture to the screen.
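A minimal sketch of the bright-pass half of such a bloom effect (hypothetical, assuming the scene was rendered into a sampler u_scene; the result would then be blurred, e.g. with a separable Gaussian, and added back onto the scene):

#version 330

// Hypothetical bright-pass for a bloom post-process.
// u_scene is assumed to contain the already-rendered scene.
in vec2 v_texture;
uniform sampler2D u_scene;
out vec4 fragColor;

void main()
{
    vec4 c = texture(u_scene, v_texture);
    // Perceptual luminance (Rec. 709 weights).
    float luma = dot(c.rgb, vec3(0.2126, 0.7152, 0.0722));
    // Keep only the bright, saturated parts; 0.8 is an assumed threshold.
    fragColor = luma > 0.8 ? c : vec4(0.0, 0.0, 0.0, 1.0);
}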

Why do my specular highlights show up so strongly on polygon edges?

I have a simple application that draws a sphere with a single directional light. I'm creating the sphere by starting with an octahedron and subdividing each triangle into 4 smaller triangles.
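(For context, a hypothetical C++ sketch of such a subdivision step, not the actual code from the post: split every triangle at its edge midpoints and push the midpoints back onto the unit sphere.)

// Hypothetical sketch of one subdivision step: each triangle becomes 4,
// with the new edge midpoints re-projected onto the unit sphere.
#include <cmath>
#include <vector>

struct Vec3 { float x, y, z; };

static Vec3 midpointOnSphere(const Vec3& a, const Vec3& b)
{
    Vec3 m{ (a.x + b.x) * 0.5f, (a.y + b.y) * 0.5f, (a.z + b.z) * 0.5f };
    float len = std::sqrt(m.x * m.x + m.y * m.y + m.z * m.z);
    return { m.x / len, m.y / len, m.z / len };   // normalize back onto the sphere
}

static void subdivide(std::vector<Vec3>& tris)   // 3 vertices per triangle
{
    std::vector<Vec3> out;
    out.reserve(tris.size() * 4);
    for (size_t i = 0; i + 2 < tris.size(); i += 3)
    {
        Vec3 a = tris[i], b = tris[i + 1], c = tris[i + 2];
        Vec3 ab = midpointOnSphere(a, b);
        Vec3 bc = midpointOnSphere(b, c);
        Vec3 ca = midpointOnSphere(c, a);
        // the 4 smaller triangles replacing the original one
        out.insert(out.end(), { a, ab, ca,  ab, b, bc,  ca, bc, c,  ab, bc, ca });
    }
    tris.swap(out);
}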
With just diffuse lighting, the sphere looks very smooth. However, when I add specular highlights, the edges of the triangles show up fairly strongly. Here are some examples:
Diffuse only:
Diffuse and Specular:
I believe that the normals are being interpolated correctly. Looking at just the normals, I get this:
In fact, if I switch to a flat shading, where the normals are per-polygon instead of per-vertex, I get this:
In my vertex shader, I'm multiplying the model's normals by the transpose inverse modelview matrix:
#version 330 core

layout (location = 0) in vec4 vPosition;
layout (location = 1) in vec3 vNormal;
layout (location = 2) in vec2 vTexCoord;

out vec3 fNormal;
out vec2 fTexCoord;

uniform mat4 transInvModelView;
uniform mat4 ModelViewMatrix;
uniform mat4 ProjectionMatrix;

void main()
{
    fNormal = vec3(transInvModelView * vec4(vNormal, 0.0));
    fTexCoord = vTexCoord;
    gl_Position = ProjectionMatrix * ModelViewMatrix * vPosition;
}
and in the fragment shader, I'm calculating the specular highlights as follows:
#version 330 core

in vec3 fNormal;
in vec2 fTexCoord;
out vec4 color;

uniform sampler2D tex;
uniform vec4 lightColor;       // RGB, assumes multiplied by light intensity
uniform vec3 lightDirection;   // normalized, assumes directional light, Lambertian lighting
uniform float specularIntensity;
uniform float specularShininess;
uniform vec3 halfVector;       // halfway between eye and light
uniform vec4 objectColor;

void main()
{
    vec4 texColor = objectColor;
    float specular = max(dot(halfVector, fNormal), 0.0);
    float diffuse = max(dot(lightDirection, fNormal), 0.0);
    if (diffuse == 0.0)
    {
        specular = 0.0;
    }
    else
    {
        specular = pow(specular, specularShininess) * specularIntensity;
    }
    color = texColor * diffuse * lightColor + min(specular * lightColor, vec4(1.0));
}
I was a little confused about how to calculate the halfVector. I'm doing it on the CPU and passing it in as a uniform. It's calculated like this:
vec3 lightDirection(1.0, 1.0, 1.0);
lightDirection = normalize(lightDirection);
vec3 eyeDirection(0.0, 0.0, 1.0);
eyeDirection = normalize(eyeDirection);
vec3 halfVector = lightDirection + eyeDirection;
halfVector = normalize(halfVector);
glUniform3fv(halfVectorLoc, 1, &halfVector[0]);
Is that the correct formulation for the halfVector? Or does it need to be done in the shaders as well?
Interpolating normals across a face can (and almost always will) result in a shortening of the normal. That's why the highlight is darker in the center of a face and brighter at corners and edges. To fix this, just re-normalize the normal in the fragment shader:
vec3 normal = normalize(fNormal); // 'in' variables are read-only, so use a local copy
Btw, you cannot precompute the half vector, as it is view dependent (that's the whole point of specular lighting). In your current scenario, the highlight will not change when you move the camera (keeping its direction).
One way to do this in the shader is to pass an additional uniform for the eye position and then calculate the view direction as eyePosition - vertexPosition. Then continue as you did on the CPU.
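A minimal fragment-shader sketch of that (hypothetical; eyePosition and fragPos are names assumed here, neither appears in the original shaders):

uniform vec3 eyePosition;   // camera position, in the same space as fragPos
in vec3 fragPos;            // interpolated vertex position from the vertex shader

vec3 computeHalfVector(vec3 lightDir)
{
    vec3 viewDir = normalize(eyePosition - fragPos); // per-fragment view direction
    return normalize(lightDir + viewDir);            // same formula as on the CPU
}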

OpenGL texture and UV mapping issue

I am currently trying to make a little game in OpenGL as an attempt to learn how to use the API. I've come to a point where I can move a camera around a simple scene, and I can render models and shade them with a simple Phong model shader.
I'm right now working on texturing the models in the scene, so I got a copy of Maya and made (with quite some struggle) a square with a texture, with the UV mapping done within Maya.
When I render the scene, the texture is applied, but far from correctly. I read the models as .obj files with a parser I wrote myself, and the textures are read using a function I found online a while back.
I'm not sure how to describe the problem in sufficient detail, nor what to look for in the code, but here are some code fragments that I suspect contain the problem.
Reading the texture
GLuint loadTexture(Image* image) {
    GLuint textureId;
    glGenTextures(1, &textureId);
    glBindTexture(GL_TEXTURE_2D, textureId);
    glTexImage2D(GL_TEXTURE_2D,
                 0,
                 GL_RGB,
                 image->width, image->height,
                 0,
                 GL_RGB,
                 GL_UNSIGNED_BYTE,
                 image->pixels);
    return textureId;
}
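As an aside (an assumption on my part, not something the post confirms as the bug): loadTexture sets no sampling parameters, and OpenGL's default minification filter (GL_NEAREST_MIPMAP_LINEAR) expects mipmaps, so a texture loaded this way is incomplete unless mipmaps are generated. A hedged addition after the glBindTexture call:

// Assumed addition: use non-mipmapped filtering so the texture is
// complete without a glGenerateMipmap call.
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);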
Setting the texture prior to rendering the mesh
// set texture
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, this->body_texture);
current_shader->setUniformint(0, "Difuse_texture");
Vertex shader
#version 410

layout(location = 0) in vec3 VertexPosition;
layout(location = 1) in vec3 VertexNormal;
layout(location = 1) in vec2 TextureCoord;

out vec3 Position;
out vec3 Normal;
out vec2 TexCoord;

uniform mat4 ModelMatrix;
uniform mat4 VeiwMatrix;
uniform mat4 ProjectionMatrix;
uniform mat3 NormalMatrix;

void main() {
    mat4 ModelVeiwMatrix = VeiwMatrix * ModelMatrix;
    mat4 MVP = ProjectionMatrix * ModelVeiwMatrix;
    TexCoord = TextureCoord;
    Normal = normalize(NormalMatrix * VertexNormal);
    Position = vec3(ModelVeiwMatrix * vec4(VertexPosition, 1.0));
    gl_Position = MVP * vec4(VertexPosition, 1.0);
}
Fragment shader
#version 410

in vec3 Position;
in vec3 Normal;
in vec2 TexCoord;

uniform vec4 LightPosition;
uniform vec3 LightIntensity;
uniform vec3 Kd;
uniform vec3 Ka;
uniform vec3 Ks;
uniform float Shininess;
uniform sampler2D Difuse_texture;

layout(location = 0) out vec4 FragColor;

vec4 ads() {
    vec3 n = normalize(Normal);
    vec3 s = normalize(vec3(LightPosition) - Position);
    vec3 v = normalize(vec3(-Position));
    vec3 r = reflect(-s, n);
    vec3 specular_light = Ks * pow(max(dot(r, v), 0.0), Shininess);
    vec3 ad_light = Ka + Kd * max(dot(s, n), 0.0);
    vec4 TexColor = texture2D(Difuse_texture, TexCoord);
    return TexColor; // (vec4(LightIntensity, 1.0) * (vec4(ad_light, 1.0) * TexColor + vec4(specular_light, 1.0)));
}

void main() {
    FragColor = ads();
}
I know some things are written strangely, but at this point I'm starting to just try anything to get it working.
Does anyone have a suggestion on how to solve this strange UV mapping?
EDIT:
OBJ LOADING
I have made the obj loader print all vertex attributes and compared these with the indexing in the .obj file. It looks like the vertices, normals and UVs are showing up in the correct order.
Screenshot
The scene looks like this, using just a simple red-to-green gradient as the texture image:
(By my understanding, the square should show the gradient from the texture, not just a single color?)
Alignment sounds like a possible flaw; how can I correct this?
http://imageshack.com/a/img674/9927/y0bJ51.png
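If alignment really is the issue (a guess; the post does not confirm it): OpenGL assumes 4-byte row alignment when unpacking pixel data, so tightly packed GL_RGB images whose row size is not a multiple of 4 come out skewed. A hedged fix, placed before the glTexImage2D call:

// Assumed fix (not from the original post): tell GL that the rows of
// image->pixels are tightly packed, with no 4-byte row padding.
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);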
SOLUTION
I made a very simple and easy-to-overlook mistake. At the top of the vertex shader I wrote:
layout(location = 0) in vec3 VertexPosition;
layout(location = 1) in vec3 VertexNormal;
layout(location = 1) in vec2 TextureCoord;
So I guess that when I sent the normal data to location 1, I overwrote the texture coordinates with normal data, so the UV coords never reached the fragment shader.
Changing it to the following resolved the problem without further changes:
layout(location = 0) in vec3 VertexPosition;
layout(location = 1) in vec3 VertexNormal;
layout(location = 2) in vec2 TextureCoord;

Diffuse normals inverted when 3D model is too large (cylinder) using ASSIMP and Phong shading

Currently I'm setting up some lighting in a 3D scene I created in Blender and loaded via assimp with the following options set:
aiProcess_GenSmoothNormals | aiProcess_Triangulate | aiProcess_CalcTangentSpace | aiProcess_FlipUVs
Currently I'm stuck on a really weird glitch in my program. I'm implementing Phong shading in the fragment shader for lighting, with the following properties:
Each of the models has textures set up.
Each of the models has normal vectors loaded from the model (with some pre-calculations applied, probably because of the aiProcess_GenSmoothNormals flag).
The specular light is working on all objects.
Diffuse colors work as they should.
However, the pipe objects are different: the diffuse color is always on the opposite side of the pipe from the side that should be lit, while the specular light is on the correct side. This makes things weird, since the specular component works as it should while the diffuse component is always on the wrong side. I noticed this effect when scaling my cylinder objects beyond a certain point in Blender (smaller scaled cylinders still work as they should), so scaling cylinder objects beyond a certain threshold probably has something to do with it.
My scene, where the pipe-like objects have working specular components but the diffuse colors are on the opposite side of the light source.
Normals as seen in Blender
My first guess was that it had something to do with normal scaling, but I already used a normal matrix for that purpose in the vertex shader and the other objects in my scene work just fine.
Vertex Shader:
#version 330

layout(location = 0) in vec3 vertex;
layout(location = 1) in vec3 normal;
layout(location = 2) in vec3 tangent;
layout(location = 3) in vec3 color;
layout(location = 4) in vec2 texCoord;

uniform mat4 projection;
uniform mat4 view;
uniform mat4 model;
uniform vec3 lightPos;

out vec3 Position;
out vec3 Normal;
out vec3 LightPos;
out vec2 TexCoord;

void main()
{
    gl_Position = projection * view * model * vec4(vertex, 1.0);

    // Position in view space
    Position = vec3(view * model * vec4(vertex, 1.0));

    // Normal in view space
    mat3 normalMat = transpose(inverse(mat3(view * model)));
    Normal = normalMat * normal;

    // Light position in view space
    LightPos = vec3(view * vec4(lightPos, 1.0));

    // Texture
    TexCoord = texCoord;
}
Fragment Shader:
#version 330

in vec3 Position;
in vec3 Normal;
in vec3 LightPos;
in vec2 TexCoord;

uniform sampler2D texture0;
out vec4 outColor;

void main()
{
    // defaults
    vec4 ambient = vec4(0.2);
    vec4 diffuse = vec4(0.4);
    vec4 specular = vec4(0.5);
    vec4 texColor = texture(texture0, TexCoord);

    // Phong shading
    vec3 LightDir = normalize(LightPos - Position);
    vec3 Norm = normalize(Normal);
    vec3 ViewDir = normalize(-Position);
    vec3 ReflectDir = reflect(-LightDir, Norm);
    float specularContribution = pow(max(dot(ViewDir, ReflectDir), 0.0), 32);

    // Calculate diffuse component
    vec4 I = diffuse * max(dot(Norm, LightDir), 0.0);
    diffuse = clamp(I, 0.0, 1.0);

    // Calculate specular component
    specular = specular * specularContribution;

    outColor = texColor * (diffuse + ambient + specular);
}
Edit
I added a geometry shader that displays all the vertex normals in a second drawing pass for debugging purposes. However, when displaying the normals, they move slightly with camera movement, which they should not do. I am guessing this is probably the cause of my aforementioned issue.
I made a small video illustrating the normal movement: Youtube video that displays the normal movement issues.
The video shows the pink normals changing direction as the camera moves. This should not be the case and I don't know why. Is it an incorrect normal matrix, or does ASSIMP maybe load the normals incorrectly?
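For reference, a minimal sketch of what such a normal-visualization pass can look like (hypothetical, not the post's actual code; it assumes the debug pass's vertex shader outputs view-space positions and normals, so only the projection remains to be applied here):

#version 330

// Hypothetical debug geometry shader: emits one short line per vertex
// along its (view-space) normal, drawn in a separate pass.
layout(triangles) in;
layout(line_strip, max_vertices = 6) out;

in vec3 Normal[];          // view-space normals from the debug vertex shader
uniform mat4 projection;   // gl_in positions are assumed to be in view space

void main()
{
    for (int i = 0; i < 3; ++i)
    {
        vec4 p = gl_in[i].gl_Position;
        gl_Position = projection * p;                                           // base of the line
        EmitVertex();
        gl_Position = projection * (p + vec4(normalize(Normal[i]) * 0.2, 0.0)); // tip of the line
        EmitVertex();
        EndPrimitive();
    }
}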