OpenGL texture: no matching overloaded function found

First of all, here is the code of my fragment shader:
#version 330 core

struct Material {
    sampler2D diffuse;
};

struct Light {
    vec3 position;
    vec3 ambient;
    vec3 diffuse;
};

in vec3 Normal;
in vec3 FragPos;
in vec3 TexCoords;

out vec4 color;

uniform vec3 viewPos;
uniform Material material;
uniform Light light;

void main()
{
    // ambient
    vec3 ambient = light.ambient * vec3(texture(material.diffuse, TexCoords));

    // diffuse
    vec3 norm = normalize(Normal);
    vec3 lightDir = normalize(light.position - FragPos);
    float diff = max(dot(norm, lightDir), 0.0);
    vec3 diffuse = light.diffuse * diff * vec3(texture(material.diffuse, TexCoords));

    color = vec4(ambient + diffuse, 1.0f);
}
When I try to compile, I get the error:
'texture': no matching overloaded function found (using implicit conversion)
I looked at the GLSL documentation, but it looks correct. After that I searched for an error in my OpenGL file, but it looks OK too.

You are trying to read from a 2D sampler using 3D texture coordinates. Either change in vec3 TexCoords to in vec2 TexCoords, or change the texture lookup from texture(material.diffuse, TexCoords) to texture(material.diffuse, TexCoords.xy).
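For reference, here is a minimal sketch of the shader with the first fix applied (the unused viewPos uniform is omitted); it assumes the matching vertex shader output is changed from vec3 to vec2 as well:

#version 330 core

struct Material {
    sampler2D diffuse;
};

struct Light {
    vec3 position;
    vec3 ambient;
    vec3 diffuse;
};

in vec3 Normal;
in vec3 FragPos;
in vec2 TexCoords; // was vec3: texture() on a sampler2D takes vec2 coordinates

out vec4 color;

uniform Material material;
uniform Light light;

void main()
{
    // both lookups now match the overload texture(sampler2D, vec2)
    vec3 ambient = light.ambient * vec3(texture(material.diffuse, TexCoords));
    vec3 norm = normalize(Normal);
    vec3 lightDir = normalize(light.position - FragPos);
    float diff = max(dot(norm, lightDir), 0.0);
    vec3 diffuse = light.diffuse * diff * vec3(texture(material.diffuse, TexCoords));
    color = vec4(ambient + diffuse, 1.0);
}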

GLSL having strange black faces with the second mesh

platform: Windows10
context: OpenGL, glew, Win32
So I loaded two meshes (using a simple OBJ parser, which only reads triangulated meshes), with vertex position, UV, and normal data. The first mesh is lit correctly, with no black faces. The second one looks like this:
[Screenshot: "The Strange Effects"]
my vertex shader:
#version 440

in vec3 pos;
in vec2 tex;
in vec3 nor;

uniform float Scale;
uniform mat4 perspective;
uniform mat4 model;

out vec3 normaldir;
out vec2 texOut;
out vec3 FragPos;

void main()
{
    // normal matrix: the inverse-transpose keeps normals perpendicular under non-uniform scale
    normaldir = normalize(mat3(transpose(inverse(model))) * nor);
    gl_Position = perspective * model * vec4(pos.xyz, 1.0);
    texOut = tex;
    FragPos = vec3(model * vec4(pos, 1.0));
}
my fragment shader:
#version 440

uniform float Scale;
uniform sampler2D diffuse;
uniform sampler2D normal;
uniform vec3 viewPos;
//uniform sampler2D normalMap0;

in vec3 normaldir;
in vec2 texOut;
in vec3 FragPos;

layout(location = 0) out vec4 FragColor0;

void main()
{
    // animated point light circling the scene
    vec3 lightPos = {2, 6, 0};
    lightPos.x = sin(Scale) * 5;
    lightPos.z = cos(Scale) * 5;
    vec3 lightDir = normalize(lightPos - FragPos);
    vec3 lightColor = {1.0, 1.0, 1.0};

    float specularStrength = 1.6;
    float diff = max(dot(normaldir, lightDir), 0.0);
    vec3 diffuseD = diff * lightColor;

    vec3 viewDir = normalize(viewPos - FragPos);
    vec3 reflectDir = reflect(-lightDir, normaldir);
    vec3 ambient = {0.0, 0.2, 0.4};
    float spec = pow(max(dot(viewDir, reflectDir), 0.0), 25);
    vec3 specular = specularStrength * spec * lightColor;

    vec3 diffuseCol = texture(diffuse, texOut).xyz;
    vec3 result = (ambient + diffuseD + specular) * diffuseCol;
    FragColor0 = vec4(result, 1.0);
}
Sorry, I made a very dumb mistake. Thank you for all your support @Rabbid76 (and yes, I had inverted the normals) @paddy.
The problem was in binding the normal buffers: I uploaded glm::vec2 * size instead of glm::vec3 * size for the normals' buffer.
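For illustration, a minimal host-side sketch of that fix, assuming the normals are stored in a std::vector<glm::vec3> and uploaded with glBufferData (the names normalVBO, normals, and uploadNormals are illustrative, not from the original code):

#include <vector>
#include <GL/glew.h>
#include <glm/glm.hpp>

// Uploads per-vertex normals into an existing VBO.
void uploadNormals(GLuint normalVBO, const std::vector<glm::vec3>& normals)
{
    glBindBuffer(GL_ARRAY_BUFFER, normalVBO);
    // The bug: using sizeof(glm::vec2) here allocates only two floats per
    // normal, so the tail of the buffer reads as garbage and faces go black.
    glBufferData(GL_ARRAY_BUFFER,
                 sizeof(glm::vec3) * normals.size(), // three floats per normal
                 normals.data(),
                 GL_STATIC_DRAW);
}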

Phong lighting leaves stripes pattern

I am learning lighting in OpenGL; in this example I used the Phong method. Everything seems to be working as intended, but I notice that this method leaves some "stripes", especially in the diffuse part of the resulting color:
Vertex shader:
#version 330 core

layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 normal;

struct Light {
    vec3 position;
    vec3 ambient;
    vec3 diffuse;
    vec3 specular;
};

uniform mat4 Model;
uniform mat4 View;
uniform mat4 Projection;
uniform Light light;

out vec3 aNormal;
out vec3 FragPos;
out vec3 LightPos;

//Usually you would transform the input into coordinates that fall within OpenGL's visible region
void main()
{
    // lighting is done in view space, so the normal matrix comes from View * Model
    aNormal = mat3(transpose(inverse(View * Model))) * normal;
    FragPos = vec3(View * Model * vec4(aPos, 1.0));
    gl_Position = Projection * vec4(FragPos, 1.0);
    LightPos = vec3(View * vec4(light.position, 1.0));
}
Fragment shader:
#version 330 core

out vec4 FragColor;

in vec3 aNormal;
in vec3 FragPos;
in vec3 LightPos;

struct Material {
    vec3 ambient;
    vec3 diffuse;
    vec3 specular;
    float shininess;
};

struct Light {
    vec3 position;
    vec3 ambient;
    vec3 diffuse;
    vec3 specular;
};

uniform Material material;
uniform Light light;

precision highp float;

void main()
{
    vec3 ambient = material.ambient * light.ambient;
    vec3 norm = normalize(aNormal);

    // diffuse
    vec3 lightDir = normalize(LightPos - FragPos);
    float diff = max(dot(norm, lightDir), 0.0);
    vec3 diffuse = (diff * material.diffuse) * light.diffuse;

    // specular
    vec3 specular = vec3(0);
    if (diff > 0.0) {
        vec3 viewDir = normalize(-FragPos); // the camera sits at the origin in view space
        vec3 reflectDir = reflect(-lightDir, norm);
        float spec = pow(max(dot(viewDir, reflectDir), 0.0), material.shininess);
        specular = light.specular * (spec * material.specular);
    }

    vec3 result = diffuse + (ambient + specular);
    FragColor = vec4(result, 1.0);
}
I was wondering why this happens. I suspect it is related to float precision, but I tried precision highp float; and it had no effect. What is causing this? Is there a way to fix it?

How do you incorporate per pixel lighting in shaders with LIBGDX?

So I've currently managed to write a shader using Xoppa's tutorials and an AssetManager, and I managed to bind a texture to the model, and it looks fine.
Now the next step, I guess, would be to create diffuse lighting (not sure if that's the word? Phong shading?) to give the bunny some form of shading. While I have a little experience with GLSL shaders in LWJGL, I'm unsure how to process that same information so I can use it in libGDX and its GLSL shaders.
I understand that this all could be accomplished using the Environment class etc. But I want to achieve this through the shaders alone or by traditional means simply for the challenge.
In LWJGL, my shaders would have these inputs and uniforms:
in vec3 position;
in vec2 textureCoordinates;
in vec3 normal;
out vec2 pass_textureCoordinates;
out vec3 surfaceNormal;
out vec3 toLightVector;
uniform mat4 transformationMatrix;
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
uniform vec3 lightPosition;
This would be reasonably easy for me to calculate in LWJGL.
Vertex file:
attribute vec3 a_position;
attribute vec3 a_normal;
attribute vec2 a_texCoord0;

uniform mat4 u_worldTrans;
uniform mat4 u_projViewTrans;

varying vec2 v_texCoords;

void main() {
    v_texCoords = a_texCoord0;
    gl_Position = u_projViewTrans * u_worldTrans * vec4(a_position, 1.0);
}
I imagine that I could implement the uniforms similarly to the LWJGL GLSL example, but I don't know how I can apply these uniforms in libGDX and have it work. I am unsure what u_projViewTrans is; I'm assuming it is a combination of the projection, transformation and view matrices, set with camera.combined?
If someone could help me understand the process, or point to an example of how per-pixel lighting can be implemented with just u_projViewTrans and u_worldTrans, I'd greatly appreciate your time and effort in helping me understand these concepts a bit better.
Here's my GitHub upload of my work in progress: here.
You can do the light calculations in world space. A simple Lambertian diffuse light can be calculated like this:
vec3 toLightVector = normalize( lightPosition - vertexPosition );
float lightIntensity = max( 0.0, dot( normal, toLightVector ));
A detailed explanation can be found in the answer to the Stack Overflow question How does this faking the light work on aerotwist?.
While Gouraud shading calculates the light in the vertex shader, Phong shading calculates the light in the fragment shader.
(see further GLSL fixed function fragment program replacement)
A Gouraud shader may look like this:
Vertex Shader:
attribute vec3 a_position;
attribute vec3 a_normal;
attribute vec2 a_texCoord0;

uniform mat4 u_worldTrans;
uniform mat4 u_projViewTrans;
uniform vec3 lightPosition;

varying vec2 v_texCoords;
varying float v_lightIntensity;

void main()
{
    vec4 vertPos = u_worldTrans * vec4(a_position, 1.0);
    // world-space normal (assumes u_worldTrans has no non-uniform scale)
    vec3 normal = normalize(mat3(u_worldTrans) * a_normal);
    vec3 toLightVector = normalize(lightPosition - vertPos.xyz);
    v_lightIntensity = max( 0.0, dot(normal, toLightVector));
    v_texCoords = a_texCoord0;
    gl_Position = u_projViewTrans * vertPos;
}
Fragment Shader:
varying vec2 v_texCoords;
varying float v_lightIntensity;

uniform sampler2D u_texture;

void main()
{
    vec4 texCol = texture2D( u_texture, v_texCoords.st );
    gl_FragColor = vec4( texCol.rgb * v_lightIntensity, 1.0 );
}
A Phong shader may look like this:
Vertex Shader:
attribute vec3 a_position;
attribute vec3 a_normal;
attribute vec2 a_texCoord0;

uniform mat4 u_worldTrans;
uniform mat4 u_projViewTrans;

varying vec2 v_texCoords;
varying vec3 v_vertPosWorld;
varying vec3 v_vertNVWorld;

void main()
{
    vec4 vertPos = u_worldTrans * vec4(a_position, 1.0);
    v_vertPosWorld = vertPos.xyz;
    v_vertNVWorld = normalize(mat3(u_worldTrans) * a_normal);
    v_texCoords = a_texCoord0;
    gl_Position = u_projViewTrans * vertPos;
}
Fragment Shader:
varying vec2 v_texCoords;
varying vec3 v_vertPosWorld;
varying vec3 v_vertNVWorld;

uniform sampler2D u_texture;

struct PointLight
{
    vec3 color;
    vec3 position;
    float intensity;
};

uniform PointLight u_pointLights[1];

void main()
{
    vec3 toLightVector = normalize(u_pointLights[0].position - v_vertPosWorld.xyz);
    float lightIntensity = max( 0.0, dot(v_vertNVWorld, toLightVector));
    vec4 texCol = texture2D( u_texture, v_texCoords.st );
    vec3 finalCol = texCol.rgb * lightIntensity * u_pointLights[0].color;
    gl_FragColor = vec4( finalCol, 1.0 ); // lightIntensity is already folded into finalCol
}

How can I use normals from OBJ file for diffuse lighting?

I'm trying to implement diffuse lighting. My vertex and fragment shaders look like this:
#version 150

attribute vec3 position;
attribute vec3 color;
attribute vec3 normal;

out vec3 inColor;
out vec3 inNormal;
out vec3 inPosition;

uniform mat4 transform;

void main(void) {
    gl_Position = transform * vec4(position, 1.0);
    inColor = color;
    inNormal = normal;
    inPosition = position;
}
------------------------------------
#version 150

out vec4 outColor;

in vec3 inColor;
in vec3 inNormal;
in vec3 inPosition;

uniform vec3 lightPosition;

void main(void) {
    vec3 lightVector = normalize(lightPosition - inPosition);
    float brightness = clamp(dot(lightVector, inNormal), 0, 1);
    outColor = vec4(brightness, brightness, brightness, 1.0f);
}
The question is: what am I supposed to do with the normals after I load them from the OBJ file?

GLSL normals change with a camera rotation

I know that the same questions have been asked many times, but unfortunately I am unable to find the source of my problem.
With the help of tutorials I've written a small GLSL shader. Right now it can work with ambient light and load normals from a normal map. The issue is that the directional light seems to depend on my viewing angle.
Here are my shaders:
//Vertex Shader
#version 120

attribute vec3 position;
attribute vec2 texCoord;
attribute vec3 normal;
attribute vec3 tangent;

varying vec2 texCoord0;
varying mat3 tbnMatrix;

uniform mat4 transform;

void main() {
    gl_Position = transform * vec4(position, 1.0);
    texCoord0 = texCoord;
    vec3 n = normalize((transform * vec4(normal, 0.0)).xyz);
    vec3 t = normalize((transform * vec4(tangent, 0.0)).xyz);
    // Gram-Schmidt: re-orthogonalize the tangent against the normal
    t = normalize(t - dot(t, n) * n);
    vec3 btTangent = cross(t, n);
    tbnMatrix = transpose(mat3(t, btTangent, n));
}
//Fragment Shader
#version 120

varying vec2 texCoord0;
varying mat3 tbnMatrix;

struct BaseLight {
    vec3 color;
    float intensity;
};

struct DirectionalLight {
    BaseLight base;
    vec3 direction;
};

uniform sampler2D diffuse;
uniform sampler2D normalMap;
uniform vec3 ambientLight;
uniform DirectionalLight directionalLight;

vec4 calcLight(BaseLight base, vec3 direction, vec3 normal) {
    float diffuseFactor = dot(normal, normalize(direction));
    vec4 diffuseColor = vec4(0, 0, 0, 0);
    if (diffuseFactor > 0) {
        diffuseColor = vec4(base.color, 1.0) * base.intensity * diffuseFactor;
    }
    return diffuseColor;
}

vec4 calcDirectionalLight(DirectionalLight directionalLight, vec3 normal) {
    return calcLight(directionalLight.base, directionalLight.direction, normal);
}

void main() {
    // unpack the normal map from the [0,1] texture range to roughly [-1,1]
    vec3 normal = tbnMatrix * (255.0/128.0 * texture2D(normalMap, texCoord0).xyz - 255.0/256.0);
    vec4 totalLight = vec4(ambientLight, 0) + calcDirectionalLight(directionalLight, normal);
    gl_FragColor = texture2D(diffuse, texCoord0) * totalLight;
}
"transform" matrix that I send to the shader is summarily computed this way:
viewProjection=m_perspective* glm::lookAt(CameraPosition,CameraPosition+m_forward,m_up);
glm::mat4 transform = vievProjection * object_matrix;
"object_matrix" is a matrix that I get directly from physics engine.(I think it's the matrix that defines position and rotation of the object in the world space, correct me if I'm wrong.)
And I guess that the "transform" matrix is computed correctly, since all the objects are drawn in the right positions. It looks like the problem is related to the normals, because if I set gl_FragColor = vec4(normal, 0) the color also changes with camera rotation.
I would greatly appreciate it if anyone could point me to my mistake.