Problems with water shader (don't want to refract objects above water) - GLSL

I have created a water shader which has a problem with refraction. It is not only refracting objects under water (which it should); it is also refracting objects that are not in the water.
Here is the full source of the fragment shader:
varying vec2 v_UV1;
uniform sampler2D u_Scene;
uniform sampler2D u_Depth;
uniform sampler2D u_WaterDUDV;
uniform vec2 u_ViewSize;
uniform float u_Time;
uniform vec3 u_Color;
uniform float u_Transparency;
void main() {
    float transparency = u_Transparency;
    vec4 waterColor = vec4(u_Color, 1.0);

    vec2 uv = v_UV1 + vec2(0.3, 0.1) * u_Time;
    vec2 dudv = texture2D(u_WaterDUDV, uv).rg * 2.0 - 1.0;

    vec2 finalSceneUV = gl_FragCoord.xy / u_ViewSize;
    vec2 sceneUV = gl_FragCoord.xy / u_ViewSize;
    vec2 sceneOffsetUV = sceneUV + dudv * 0.007;

    float sceneDepth = texture2D(u_Depth, sceneUV).r;
    float sceneOffsetDepth = texture2D(u_Depth, sceneOffsetUV).r;

    if (sceneDepth < gl_FragCoord.z) {
        transparency = 0.0;
    } else {
        // The cube is viewed through water.
        finalSceneUV = sceneOffsetUV;
    }

    vec4 sceneColor = texture2D(u_Scene, finalSceneUV);
    vec4 finalColor = mix(sceneColor, waterColor, transparency);
    gl_FragColor = finalColor;
}
Any ideas on how this could be fixed?
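One common fix, sketched here as an assumption rather than a confirmed answer: the shader already samples sceneOffsetDepth but never uses it. Only keep the distorted UV when the geometry sampled there is itself behind the water surface:

if (sceneDepth < gl_FragCoord.z) {
    transparency = 0.0;                // scene geometry is in front of the water
} else if (sceneOffsetDepth > gl_FragCoord.z) {
    finalSceneUV = sceneOffsetUV;      // the distorted sample is underwater too
}
// otherwise keep the undistorted finalSceneUV, so objects above the
// water are not dragged into the refraction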

Related

GLSL multi-texturing- blending textures

I'm trying to implement multi-texturing in OpenGL. The texture used is based on the surface normal of a given vertex: the more vertical the surface is, the more of the second texture is visible.
Here is what I have so far.
I want to blend the edges together now rather than having that hard edge. Is it possible to blend the textures in this way? If so, how do I do that?
This is my fragment shader code:
#version 150
in vec2 pass_textureCoords;
in vec3 surfaceNormal;
in vec3 toLightVector;
in vec3 toCamera;
in vec3 playerPosition;
in vec4 vertexPosition;
in float blendPosition;
in float visibility;
out vec4 out_Color;
uniform sampler2D texture0;
uniform sampler2D texture1;
uniform vec3 skyColour;
uniform vec3 light_colour;
uniform float shineDamper;
uniform float reflectivity;
void main(void){
    vec3 unitNormal = normalize(surfaceNormal);
    vec3 unitLightVector = normalize(toLightVector);
    float nDot1 = dot(unitNormal, unitLightVector);
    float brightness = max(nDot1, 0.2);
    vec3 diffuse = brightness * light_colour;
    vec3 unitToCamera = normalize(toCamera);
    vec3 lightDirection = -unitLightVector;
    vec3 reflectedLightDirection = reflect(lightDirection, unitNormal);
    float specular = dot(reflectedLightDirection, unitToCamera);
    specular = max(specular, 0.0);
    float damped = pow(specular, shineDamper);
    vec3 finalSpecular = damped * reflectivity * light_colour;
    out_Color = (vec4(diffuse, 1.0) * texture(texture0, pass_textureCoords));
    out_Color = mix(vec4(skyColour, 1.0), out_Color, visibility);
    if(vertexPosition.y < -6.1 || surfaceNormal.y < 0.6){
        out_Color = (vec4(diffuse, 1.0) * texture(texture1, pass_textureCoords));
        out_Color = mix(vec4(skyColour, 1.0), out_Color, visibility);
    }
    if(playerPosition.y < -6.1){
        out_Color = mix(vec4(0.0, 0.3, 0.5, 1.0), out_Color, 0.1);
    }
}
EDIT:
This is the new fragment shader code, for anyone interested:
#version 150
in vec2 pass_textureCoords;
in vec3 surfaceNormal;
in vec3 toLightVector;
in vec3 toCamera;
in vec3 playerPosition;
in vec4 vertexPosition;
in float blendPosition;
in float visibility;
out vec4 out_Color;
uniform sampler2D texture0;
uniform sampler2D texture1;
uniform vec3 skyColour;
uniform vec3 light_colour;
uniform float shineDamper;
uniform float reflectivity;
void main(void){
    vec3 unitNormal = normalize(surfaceNormal);
    vec3 unitLightVector = normalize(toLightVector);
    float nDot1 = dot(unitNormal, unitLightVector);
    float brightness = max(nDot1, 0.2);
    vec3 diffuse = brightness * light_colour;
    vec3 unitToCamera = normalize(toCamera);
    vec3 lightDirection = -unitLightVector;
    vec3 reflectedLightDirection = reflect(lightDirection, unitNormal);
    float specular = dot(reflectedLightDirection, unitToCamera);
    specular = max(specular, 0.0);
    float damped = pow(specular, shineDamper);
    vec3 finalSpecular = damped * reflectivity * light_colour;
    out_Color.a = 1.0;
    vec4 fog = vec4(skyColour, 1.0);
    vec4 diffusion = vec4(diffuse, 1.0);
    float a = clamp((unitNormal.y - 0.6) * 5.0 + 0.5, 0.0, 0.7);
    vec3 texture0_colour = (mix(fog, diffusion * texture(texture0, pass_textureCoords), visibility)).rgb;
    vec3 texture1_colour = (mix(fog, diffusion * texture(texture1, pass_textureCoords), visibility)).rgb;
    out_Color.rgb = mix(texture1_colour, texture0_colour, a);
}
To mix two textures based on a value a, you do:
float a = ...;
vec3 color0 = texture(texture0, pass_textureCoords).rgb;
vec3 color1 = texture(texture1, pass_textureCoords).rgb;
out_Color.rgb = mix(color0, color1, a);
Assuming that your unitNormal = (0,1,0) is the upwards direction, as it appears from the code, then the value of
float a = clamp(unitNormal.y, 0, 1);
will result in a smooth transition between the two textures. However, you probably want a sharper transition, in which case you shift and scale the unitNormal.y value to adjust where the transition starts and ends:
float a = clamp((unitNormal.y - .6)*5 + .5, 0, 1);
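For what it's worth, the same 0.5 to 0.7 transition band can be written with smoothstep, which adds a Hermite ease at both ends (a small editorial sketch, equivalent up to the smoothing):

float a = smoothstep(0.5, 0.7, unitNormal.y);
out_Color.rgb = mix(texture1_colour, texture0_colour, a);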

Oversaturation in BRDF calculation

Edit:
In hindsight, those images may be correct, since they're just showing the vector differences. Assuming that's right, the issue is actually somewhere in the code regarding the BRDF. I've added the full shader code, and I'm attaching a new screenshot showing the artifacts I'm seeing. It seems to be oversaturated at certain angles.
The issue is potentially in the distribution. I tried a Beckmann distribution model as well, and it showed the same type of issue.
See here as the light source moves down over the terrain. It's oversaturating on the right-hand side:
light at horizon
light just above horizon
I'm having some issues calculating directions in the vertex shader; the direction is skewed toward one corner (the origin).
I create the terrain using instancing, but the same issue happens if I just use a static plane.
My vertex shader looks like this (using Ogre3D):
#version 330 compatibility
#define MAP_HEIGHT_FACTOR 50000
#define MAP_SCALE_FACTOR 100

// attributes
in vec4 blendIndices;
in vec4 uv0;
in vec4 uv1;
in vec4 uv2;
in vec4 position;
in vec2 vtx_texcoord0;

uniform mat4 viewProjMatrix;
uniform mat4 modelMatrix;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
uniform mat4 worldMatrix;
uniform vec3 cameraPosition;
uniform vec3 sunPosition;
out vec4 vtxPosWorld;
out vec3 lightDirection;
out vec3 viewVector;
out vec2 l_texcoord0;
uniform sampler2D heightmap;
uniform mat4 worldViewProjMatrix;

void main()
{
    // Assign to the out variable rather than shadowing it with a local,
    // so the fragment shader actually receives the world position.
    vtxPosWorld = vec4((gl_Vertex.x * MAP_SCALE_FACTOR) + uv0.w,
                       (gl_Vertex.y * MAP_SCALE_FACTOR) + uv1.w,
                       (gl_Vertex.z * MAP_SCALE_FACTOR) + uv2.w,
                       1.0) * worldMatrix;
    l_texcoord0 = vec2(vtxPosWorld.x / (8192.0 * MAP_SCALE_FACTOR),
                       vtxPosWorld.z / (8192.0 * MAP_SCALE_FACTOR));
    vec4 hmt = texture(heightmap, l_texcoord0);
    float height = hmt.x * MAP_HEIGHT_FACTOR;
    // take the height from the heightmap
    vtxPosWorld = vec4(vtxPosWorld.x, height, vtxPosWorld.z, vtxPosWorld.w);
    lightDirection = (normalize(vec4(sunPosition, 1.0)) * viewMatrix).xyz;
    viewVector = normalize((vec4(cameraPosition, 1.0) * viewMatrix).xyz - (vtxPosWorld * viewMatrix).xyz);
    gl_Position = worldViewProjMatrix * vtxPosWorld;
}
Fragment shader:
#version 330 compatibility
#define TERRAIN_SIZE 8192.0
#define HEIGHT_SCALE_FACTOR 50000
#define MAP_SCALE_FACTOR 100
#define M_PI 3.1415926535897932384626433832795

in vec2 l_texcoord0;
in vec4 vtxPosWorld;
in vec3 viewVector;
in vec3 lightDirection;
uniform vec3 sunPosition;
uniform vec3 cameraPosition;
uniform sampler2D heightmap;

float G1V(float dotP, float k)
{
    return 1.0f / (dotP * (1.0f - k) + k);
}

float calcBRDF(vec3 normal, float fresnel, float MFD, vec3 sunColor) {
    float F = fresnel;
    vec3 Nn = normalize(normal.xyz);
    vec3 Vn = viewVector;
    vec3 Ln = lightDirection;
    vec3 Hn = normalize(viewVector + lightDirection);
    float NdotV = max(dot(Nn, Vn), 0.0);
    float NdotL = max(dot(Nn, Ln), 0.0);
    float NdotH = max(dot(Nn, Hn), 0.1);
    float VdotH = max(dot(Vn, Hn), 0.0);
    float LdotH = max(dot(Ln, Hn), 0.0);
    // Microfacet Distribution
    float denom, alpha, beckmannD, GGXD;
    float NdotHSqr = NdotH * NdotH;
    float alphaSqr = MFD * MFD;
    // GGX distribution (better performance)
    denom = NdotHSqr * (alphaSqr - 1.0) + 1.0f;
    GGXD = alphaSqr / (M_PI * pow(denom, 2.0));
    float k = MFD / 2.0f;
    float GGX = G1V(NdotL, k) * G1V(NdotV, k);
    return F * GGXD * GGX;
}

// Reconstructed from context (this helper was missing from the original post):
// standard normal-incidence reflectance from the index of refraction.
float calcFresnelReflectance(float n) {
    float r = (n - 1.0) / (n + 1.0);
    return r * r;
}

float calcFresnel(float R) {
    vec3 Hn = normalize(viewVector + lightDirection);
    vec3 Vn = viewVector;
    vec3 Ln = lightDirection;
    float VdotH = dot(Vn, Hn);
    float NdotL = dot(Hn, Vn);
    float fresnel = R + (1.0 - R) * pow(1.0 - NdotL, 5.0);
    return fresnel;
}

vec3 calcNormal(sampler2D heightmap, vec2 texcoord) {
    const vec2 size = vec2(MAP_SCALE_FACTOR, 0.0);
    vec3 off = vec3(1.0, 0.0, 1.0) / TERRAIN_SIZE;
    float hL = texture2D(heightmap, texcoord - off.xy).x * HEIGHT_SCALE_FACTOR;
    float hR = texture2D(heightmap, texcoord + off.xy).x * HEIGHT_SCALE_FACTOR;
    float hD = texture2D(heightmap, texcoord - off.yz).x * HEIGHT_SCALE_FACTOR;
    float hU = texture2D(heightmap, texcoord + off.yz).x * HEIGHT_SCALE_FACTOR;
    vec3 va = normalize(vec3(size.xy, (hL - hR)));
    vec3 vb = normalize(vec3(size.yx, (hD - hU)));
    // return vec3(1.0, 1.0, 1.0); // debug: constant normal
    return normalize(cross(va, vb) / 2.0 + 0.5);
}

void main()
{
    vec3 normal = calcNormal(heightmap, l_texcoord0);
    float N = 1.69;
    float microFacetDistribution = 1.5;
    vec3 sunColor = vec3(1.0, 1.0, 1.0);
    float Rfactor = calcFresnelReflectance(N);
    float fresnel = calcFresnel(Rfactor);
    float brdf = calcBRDF(normal, fresnel, microFacetDistribution, sunColor);
    float conservedBrdf = clamp(brdf, 0.0, fresnel);
    gl_FragColor = vec4(vec3(0.5) * conservedBrdf, 1.0);
}
I've tried using view space, world space, etc. It seems like a simple/silly problem, but I can't figure it out :|
Any suggestions appreciated.
Of course the answer was something silly.
First of all, the normals were incorrect. That caused a skew in the light direction, which made the light appear to be hitting from one direction only.
Secondly the light direction itself needed to be negated.
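For reference, here is the central-difference heightmap normal in the form that usually replaces the one above (an editorial reconstruction, not the poster's final code; note that the /2 + 0.5 remap belongs to normal-map storage, not to a normal computed on the fly):

vec3 calcNormal(sampler2D heightmap, vec2 texcoord) {
    vec2 texel = vec2(1.0 / TERRAIN_SIZE);
    float hL = texture(heightmap, texcoord - vec2(texel.x, 0.0)).x * HEIGHT_SCALE_FACTOR;
    float hR = texture(heightmap, texcoord + vec2(texel.x, 0.0)).x * HEIGHT_SCALE_FACTOR;
    float hD = texture(heightmap, texcoord - vec2(0.0, texel.y)).x * HEIGHT_SCALE_FACTOR;
    float hU = texture(heightmap, texcoord + vec2(0.0, texel.y)).x * HEIGHT_SCALE_FACTOR;
    // Samples are two texels apart, i.e. 2 * MAP_SCALE_FACTOR world units, on a Y-up terrain.
    return normalize(vec3(hL - hR, 2.0 * MAP_SCALE_FACTOR, hD - hU));
}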

Parallax Mapping - GLSL- OpenGL

Over the past few days I've been trying to implement parallax mapping in my engine, but it doesn't seem to work. I have looked at at least 15 examples, and I'm still not able to get it to work.
Here is an Image:
As you can see, all that is visible is the base color; the height map is not there.
Here are my shaders:
Fragment Shader
#version 330 core
uniform sampler2D DiffuseTextureSampler;
uniform sampler2D HeightTextureSampler;
vec2 scaleBias = vec2(0.5, 0.5);
in vec3 EyeDirection_tangentspace;
in vec2 UV;
out vec4 FragColor; // core profile: gl_FragColor is unavailable
void main()
{
    float height = texture(HeightTextureSampler, vec2(UV.x, -UV.y)).r;
    // Our heightmap only has one color channel.
    float v = height * scaleBias.r - scaleBias.g;
    vec3 eye = EyeDirection_tangentspace;
    vec2 newCoords = UV + (eye.xy * v);
    vec3 rgb = texture(DiffuseTextureSampler, vec2(newCoords.x, -newCoords.y)).rgb;
    FragColor = vec4(rgb, 1.0);
}
Vertex Shader
#version 330 core
// Input vertex data, different for all executions of this shader.
layout(location = 0) in vec3 vertexPosition_modelspace;
layout(location = 1) in vec2 vertexUV;
layout(location = 2) in vec3 vertexNormal_modelspace;
layout(location = 3) in vec3 vertexTangent_modelspace;
layout(location = 4) in vec3 vertexBitangent_modelspace;
// Output data ; will be interpolated for each fragment.
out vec2 UV;
out vec3 Position_worldspace;
out vec3 EyeDirection_cameraspace;
out vec3 LightDirection_cameraspace;
out vec3 LightDirection_tangentspace;
out vec3 EyeDirection_tangentspace;
// Values that stay constant for the whole mesh.
uniform mat4 MVP;
uniform mat4 V;
uniform mat4 M;
uniform mat3 MV3x3;
uniform vec3 LightPosition_worldspace;
void main()
{
    gl_Position = MVP * vec4(vertexPosition_modelspace, 1);
    Position_worldspace = (M * vec4(vertexPosition_modelspace, 1)).xyz;

    // Vector that goes from the vertex to the camera, in camera space.
    // In camera space, the camera is at the origin (0,0,0).
    vec3 vertexPosition_cameraspace = (V * M * vec4(vertexPosition_modelspace, 1)).xyz;
    EyeDirection_cameraspace = vec3(0, 0, 0) - vertexPosition_cameraspace;

    UV = vertexUV;

    vec3 vertexTangent_cameraspace = MV3x3 * vertexTangent_modelspace;
    vec3 vertexBitangent_cameraspace = MV3x3 * vertexBitangent_modelspace;
    vec3 vertexNormal_cameraspace = MV3x3 * vertexNormal_modelspace;
    mat3 TBNMatrix = transpose(mat3(vertexTangent_cameraspace, vertexBitangent_cameraspace, vertexNormal_cameraspace));
    EyeDirection_tangentspace = Position_worldspace - vertexPosition_modelspace.xyz;
    EyeDirection_tangentspace *= TBNMatrix;
}
A couple of things:
Set your scale to 1. There's no point in halving your height scale if you can't see it at all.
(YOUR CURRENT PROBLEM) You are getting your texture coordinates with -UV.y. OpenGL does not have negative texture coordinates; a negative coordinate will pull nothing from the texture, or worse, a mirrored texture if you have tiling on. See the one-line sketch after this list.
(YOUR NEXT PROBLEM) Normalize your eye vector before calculating the new coordinates in the fragment shader. If you don't normalize, the XY coords of the vector are going to be HUGE, so your new texture coordinates are MASSIVE offsets.
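If the texture really is stored upside-down, a hedged one-line alternative to negating the coordinate is to flip it within [0,1]:

float height = texture(HeightTextureSampler, vec2(UV.x, 1.0 - UV.y)).r;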
Try these shaders. They are very simple and work. You will have to add lighting after you get the parallax working.
Vertex shader
attribute vec3 tangent;
attribute vec3 binormal;
uniform vec3 CAMERA_POSITION;
varying vec3 eyeVec;
void main()
{
    gl_Position = ftransform();
    gl_TexCoord[0] = gl_TextureMatrix[0] * gl_MultiTexCoord0;
    mat3 TBNMatrix = mat3(tangent, binormal, gl_Normal);
    eyeVec = CAMERA_POSITION - gl_Vertex.xyz;
    eyeVec *= TBNMatrix;
}
Fragment shader
uniform sampler2D basetex;
uniform sampler2D heightMap;
uniform vec2 scaleBias;
varying vec3 eyeVec;
void main()
{
    float height = texture2D(heightMap, gl_TexCoord[0].st).r;
    float v = height * scaleBias.r - scaleBias.g;
    vec3 eye = normalize(eyeVec);
    vec2 newCoords = gl_TexCoord[0].st + (eye.xy * v);
    vec3 rgb = texture2D(basetex, newCoords).rgb;
    gl_FragColor = vec4(rgb, 1.0);
}
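On the application side, the scaleBias uniform still needs sensible values; a sketch with assumed names (the program handle and the exact magnitudes are my guesses, tune to taste):

// C++/OpenGL sketch; "program" and the chosen values are assumptions.
GLint loc = glGetUniformLocation(program, "scaleBias");
glUseProgram(program);
glUniform2f(loc, 0.04f, 0.02f); // scale in .r, bias in .g, as the fragment shader reads them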

Same shaders behaving differently on Nvidia and ATI cards

A friend and I are developing an editor (CAD-like) to use in our future game.
We are using the Qt framework and OpenGL.
The problem we are encountering is that on his laptop with an integrated nVidia card, the shading works as expected and renders well. On my laptop with an integrated ATI card, as well as on my desktop with a Radeon HD5850, the Phong lighting behaves slightly differently. There are more bright spots and dark spots, and the image doesn't look good. Also, we are using a toon shader to draw a silhouette around the edges and to limit the number of shades a color can have.
The toon shader uses 2-pass rendering: the first pass renders the object in black, slightly larger than the original (shifting each vertex slightly in its normal's direction) to make the silhouette, and then the second pass renders the object normally (only limiting the shade spectrum, so it looks more comic-like).
The images are of the same thing on our 2 computers. The first difference I mentioned above; the second is that the silhouette is stretched out as it should be on my friend's computer, making an even silhouette around the object, but it is moved slightly up on my computer, making a thick line on the top and no line on the bottom.
The other thing is the Phong lighting illuminating the cube within which the object is edited. Again, it renders well on my friend's computer, but is almost all-black or all-white on mine.
First image (nVidia card):
Second image (ATI cards):
I understand that the code is long and maybe the problem lies in some Qt settings, not in the shaders, but if you see anything that strikes you as bad practice, please answer.
Code for the Phong shading follows:
#version 400
in vec4 aVertex;
in vec4 aNormal;
in vec2 aTexCoord;
uniform mat4 uPVM;
uniform mat4 uViewModel;
uniform mat4 uNormal;
uniform int uLightsOn;
out vec2 vTexCoord;
out vec3 vNormal;
flat out vec3 mEye;
flat out vec3 mLightDirection;
flat out vec4 mAxisColor;
void main(void)
{
    if(uLightsOn == 1) {
        mEye = (uViewModel * aVertex).xyz;
        mLightDirection = vec4(2.0, -2.0, 1.0, 0.0).xyz;
        vNormal = (uNormal * aNormal).xyz;
    }
    gl_Position = uPVM * aVertex;
    vTexCoord = aTexCoord;
    mAxisColor = aNormal;
}
The Phong fragment shader:
#version 400
uniform sampler2D uTexture0;
uniform int uLightsOn;
uniform vec3 uHighlightColor;
uniform int uTextured;
uniform int uAxisRender;
in vec2 vTexCoord;
in vec3 vNormal;
flat in vec3 mEye;
flat in vec3 mLightDirection;
out vec4 fragColor;
flat in vec4 mAxisColor;
struct TMaterial {
    vec4 diffuse;
    vec4 ambient;
    vec4 specular;
    float shininess;
};
TMaterial material;

void setup() {
    // setupMaterials
    material.ambient = vec4(0.4);
    material.diffuse = vec4(0.9);
    material.specular = vec4(0.0);
    material.shininess = 0.3;
}

void main(void)
{
    setup();
    vec3 finalHighlightColor = uHighlightColor;
    if(finalHighlightColor.x <= 0.0) finalHighlightColor.x = 0.1;
    if(finalHighlightColor.y <= 0.0) finalHighlightColor.y = 0.1;
    if(finalHighlightColor.z <= 0.0) finalHighlightColor.z = 0.1;
    if(uLightsOn == 0) {
        if(uAxisRender == 1) fragColor = mAxisColor;
        else fragColor = vec4(finalHighlightColor, 1.0);
        return;
    }
    vec4 diffuse;
    vec4 spec = vec4(0.0);
    vec4 ambient;
    vec3 L = normalize(mLightDirection - mEye);
    vec3 E = normalize(-mEye);
    vec3 R = normalize(reflect(-L, vNormal));
    ambient = material.ambient;
    float intens = max(dot(vNormal, L), 0.0);
    diffuse = clamp(material.diffuse * intens, 0.0, 1.0);
    if(intens > 0.0) spec = clamp(material.specular * pow(max(dot(R, E), 0.0), material.shininess), 0.0, 1.0);
    if(uTextured == 1) fragColor = (ambient + diffuse + spec) * texture(uTexture0, vTexCoord);
    else fragColor = (ambient + diffuse + spec) * vec4(finalHighlightColor, 1.0);
}
And the toon shaders:
#version 400
in vec4 aVertex;
in vec4 aNormal;
in vec2 aTexCoord;
uniform mat4 uPV;
uniform mat4 uM;
uniform mat4 uN;
uniform vec3 uLightPosition;
uniform vec3 uCameraPosition;
uniform int uSilhouetteMode;
uniform float uOffset;
// If this uniform is set, all the toon rendering is turned off and only simple axes are rendered.
// The last data in aNormal are the colors of those axes, if everything was set properly.
uniform int uAxisRendering;
flat out vec4 fAxisColor;
out vec4 vNormal;
out vec2 vTexCoord;
out vec3 vDirectionToCamera;
out vec3 vDirectionToLight;
void silhouetteMode() {
    gl_Position = uPV * uM * vec4(aVertex.xyz + aNormal.xyz * uOffset, 1.0f);
}

void toonMode() {
    vec4 worldPosition = uM * aVertex;
    vDirectionToCamera = uCameraPosition - worldPosition.xyz;
    vDirectionToLight = uLightPosition - worldPosition.xyz;
    vNormal = uN * aNormal;
    gl_Position = uPV * worldPosition;
}

void axisMode() {
    fAxisColor = aNormal;
    gl_Position = uPV * uM * aVertex;
}

void main(void)
{
    vTexCoord = aTexCoord;
    if(uSilhouetteMode == 1) {
        silhouetteMode();
    } else {
        if(uAxisRendering == 1) axisMode();
        else toonMode();
    }
}
And the fragment shader:
#version 400
uniform sampler2D uTexture;
uniform vec3 uBaseColor;
uniform float uNumShades;
uniform int uSilhouetteMode;
uniform int uAxisRendering;
flat in vec4 fAxisColor;
in vec4 vNormal;
in vec2 vTexCoord;
in vec3 vDirectionToCamera;
in vec3 vDirectionToLight;
out vec4 outFragColor;
void main(void)
{
    if(uSilhouetteMode == 1) {
        outFragColor = vec4(uBaseColor, 1.0);
        return;
    }
    if(uAxisRendering == 1) {
        outFragColor = fAxisColor;
        return;
    }
    float l_ambient = 0.1;
    float l_diffuse = clamp(dot(vDirectionToLight, vNormal.xyz), 0.0, 1.0);
    float l_specular;
    vec3 halfVector = normalize(vDirectionToCamera + vDirectionToLight);
    if(dot(vDirectionToLight, vNormal.xyz) > 0.0) {
        l_specular = pow(clamp(dot(halfVector, vNormal.xyz), 0.0, 1.0), 64.0);
    } else {
        l_specular = 0.0;
    }
    float intensity = l_ambient + l_diffuse + l_specular;
    float shadeIntensity = ceil(intensity * uNumShades) / uNumShades;
    outFragColor = vec4(texture(uTexture, vTexCoord).xyz * shadeIntensity * uBaseColor, 1.0);
}
And finally, our OpenGLWindow initialization (in Qt):
OpenGLWindow::OpenGLWindow(QWindow *parent) :
    QWindow(parent), m_animating(false), m_initialized(false),
    m_context(NULL), // initialize the pointer so the null check below is well-defined
    m_animationTimer(NULL)
{
    setSurfaceType(QWindow::OpenGLSurface);
    QSurfaceFormat format;
    format.setDepthBufferSize( 24 );
    format.setMajorVersion( 4 );
    format.setMinorVersion( 0 );
    format.setSamples( 4 );
    format.setProfile( QSurfaceFormat::CoreProfile );
    setFormat( format );
    create();

    if(!m_context) {
        m_context = new QOpenGLContext(this);
        m_context->setFormat(requestedFormat());
        m_context->create();
        m_context->makeCurrent(this);
        initializeOpenGLFunctions();
    }

    m_animationTimer = new QTimer(this);
    connect(m_animationTimer, SIGNAL(timeout()), this, SLOT(renderer()));
    m_animationTimer->setInterval(16);
}
To my eyes the nVidia image seems to be using alpha whereas the AMD one is not. I also can't see an alpha buffer being requested in your Qt setup, so it may be that.
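A minimal sketch of requesting that alpha channel explicitly (QSurfaceFormat exposes this as setAlphaBufferSize, rather than QGLFormat's old setAlpha flag):

QSurfaceFormat format;
format.setDepthBufferSize( 24 );
format.setAlphaBufferSize( 8 );   // request an alpha channel explicitly
format.setMajorVersion( 4 );
format.setMinorVersion( 0 );
format.setSamples( 4 );
format.setProfile( QSurfaceFormat::CoreProfile );
setFormat( format );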

GLSL Parallax mapping issue

I wrote a parallax mapping shader in GLSL, but it is working incorrectly. In some places it shows a correct image with a good bump, but in other positions it becomes flat. It becomes most flat when I move the camera farther from the platform (forward).
#version 120
varying vec3 pos;
varying vec3 normal;
varying vec2 tc;
varying vec3 color;
uniform vec3 camera;
void main(void)
{
    pos = (-gl_Vertex.xyz + camera);
    tc = gl_MultiTexCoord0.xy;
    normal = normalize(gl_Normal);
    color = gl_Color.xyz;
    gl_Position = ftransform();
}
#version 120
uniform sampler2D normal_map;
uniform sampler2D diffuse_map;
uniform sampler2D displacement;
uniform float mode;
varying vec2 tc;
varying vec3 pos;
void main(void)
{
    vec3 lightPos = vec3(0.0, -45.0, -40.0);
    vec4 color;
    if (mode > 1)
    {
        vec3 eyeVec = normalize(pos);
        vec2 eyeProj = normalize(eyeVec.xz);
        float curHeight = texture2D(displacement, tc).r - 0.5;
        vec2 trTc = tc - (eyeProj) * curHeight * 0.035;
        vec3 normal = normalize(texture2D(normal_map, trTc).xzy * 2.0 - 1.0);
        color = texture2D(diffuse_map, trTc) * dot(normalize(pos - lightPos), normal) * 1.0;
        color.w = 1.0;
    }
    else
    {
        vec3 normal = normalize(texture2D(normal_map, tc).xzy * 2.0 - 1.0);
        color = texture2D(diffuse_map, tc) * dot(normalize(pos - lightPos), normal) * 1.0;
    }
    gl_FragColor = color;
}
I can't even imagine where the mistake could be. I was experimenting with camera and vertex values, but it did not help.
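No answer is recorded for this one, but a hedged observation: the offset is built from the world-space view vector's xz components (eyeProj), which only line up with the texture's UV axes for one particular surface orientation, and the effect flattens out as the view direction leaves that plane. The usual approach is to move the view vector into tangent space first. A minimal sketch under that assumption (the tangent/binormal attributes are not in the original post, and the model transform is taken as identity, as the original code already assumes):

// Vertex shader (GLSL 1.20): carry the eye vector in tangent space.
attribute vec3 tangent;    // assumed per-vertex attributes
attribute vec3 binormal;
uniform vec3 camera;
varying vec3 eyeVecTS;
varying vec2 tc;

void main(void)
{
    tc = gl_MultiTexCoord0.xy;
    mat3 TBN = mat3(tangent, binormal, gl_Normal);
    eyeVecTS = (camera - gl_Vertex.xyz) * TBN; // world -> tangent space
    gl_Position = ftransform();
}

// Fragment shader: offset along the tangent-space eye direction.
vec3 eye = normalize(eyeVecTS);
float curHeight = texture2D(displacement, tc).r - 0.5;
vec2 trTc = tc + eye.xy * curHeight * 0.035; // sign depends on UV orientation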