Vertex attribute is not being interpolated at all - C++

I assign each vertex a unique set of coordinates and expect the varying to arrive interpolated in the fragment shader. The problem is that the value in the fragment shader always remains 0.0. How can I fix that? I am using OpenGL ES 2.0 with GLSL version 1.2.
Vertex shader:
attribute vec4 Position;
attribute vec4 SourceColor;
attribute vec3 BaryCentricCoord;
varying vec4 DestinationColor;
uniform mat4 Projection;
uniform mat4 Modelview;
attribute vec2 TexCoordIn;
varying vec2 TexCoordOut;
varying vec3 vBC;
void main() {
    vBC = BaryCentricCoord;
    DestinationColor = SourceColor;
    gl_Position = Projection * Modelview * Position;
    TexCoordOut = TexCoordIn;
}
Fragment shader:
#extension GL_OES_standard_derivatives : enable // required for fwidth() on GLES 2.0
varying vec4 DestinationColor;
varying vec2 TexCoordOut;
varying vec3 vBC;
uniform sampler2D Texture;
uniform int TexEnabled;
float edgeFactor();
void main() {
    if (TexEnabled == 1) {
        gl_FragColor = texture2D(Texture, TexCoordOut) * DestinationColor;
    } else {
        gl_FragColor = vec4(DestinationColor.xyz, edgeFactor());
    }
}
float edgeFactor() {
    vec3 d = fwidth(vBC);
    vec3 a3 = smoothstep(vec3(0.0), d * 1.5, vBC);
    return min(min(a3.x, a3.y), a3.z);
}
Vertex data:
float vertices[] {
    p1.x - deltaX, p1.y + deltaY, 0.0f, // top-left
    p2.x - deltaX, p2.y + deltaY, 0.0f, // top-right
    p2.x + deltaX, p2.y - deltaY, 0.0f, // bottom-right
    p2.x + deltaX, p2.y - deltaY, 0.0f, // bottom-right
    p1.x + deltaX, p1.y - deltaY, 0.0f, // bottom-left
    p1.x - deltaX, p1.y + deltaY, 0.0f  // top-left
};
Barycentric coordinates I am passing (each set of 3 corresponds to a vertex):
// R
data[0] = 1.0;
data[1] = 0.0;
data[2] = 0.0;
// G
data[3] = 0.0;
data[4] = 1.0;
data[5] = 0.0;
// B
data[6] = 0.0;
data[7] = 0.0;
data[8] = 1.0;
// B
data[9] = 0.0;
data[10] = 0.0;
data[11] = 1.0;
// G
data[12] = 0.0;
data[13] = 1.0;
data[14] = 0.0;
// R
data[15] = 1.0;
data[16] = 0.0;
data[17] = 0.0;
Blending is enabled:
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable( GL_BLEND );
Draw call: glDrawArrays(GL_TRIANGLES, 0, 6);
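For reference, a varying that stays at a constant 0.0 is usually a sign that the attribute array was never enabled or was bound to the wrong location; in that case GL ES 2.0 feeds the shader a constant default value instead of the per-vertex data. A minimal C++ sketch of the expected setup (program, baryVBO, and data are assumed names, not taken from the question):

GLuint baryVBO;
glGenBuffers(1, &baryVBO);
glBindBuffer(GL_ARRAY_BUFFER, baryVBO);
// 18 floats: 3 components for each of the 6 vertices
glBufferData(GL_ARRAY_BUFFER, 18 * sizeof(float), data, GL_STATIC_DRAW);

// Query the location after linking; -1 means the attribute was
// misspelled or optimized out by the compiler.
GLint baryLoc = glGetAttribLocation(program, "BaryCentricCoord");
glEnableVertexAttribArray(baryLoc);
glVertexAttribPointer(baryLoc, 3, GL_FLOAT, GL_FALSE, 0, 0);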

Related

Compute Normals After Vertex Deformation?

I am coding a vertex and a fragment shader, trying to distort the surface of some water and then compute Blinn-Phong lighting on the surface. I am able to successfully compute the deformed vertex positions with a simple noise function, but how can I find the distorted normals? Since it isn't a linear transformation, I am stuck. Could anyone help?
Here are the relevant files:
vertex shader:
#version 150
uniform mat4 u_Model;
uniform mat4 u_ModelInvTr;
uniform mat4 u_ViewProj;
uniform vec4 u_Color;
uniform int u_Time;
in vec4 vs_Pos; // The array of vertex positions passed to the shader
in vec4 vs_Nor; // The array of vertex normals passed to the shader
in vec4 vs_Col; // The array of vertex colors passed to the shader.
in vec2 vs_UV; // UV coords for texture to pass thru to fragment shader
in float vs_Anim; // 0.f or 1.f To pass thru to fragment shader
in float vs_T2O;
out vec4 fs_Pos;
out vec4 fs_Nor;
out vec4 fs_LightVec;
out vec4 fs_Col;
out vec2 fs_UVs;
out float fs_Anim;
out float fs_dimVal;
out float fs_T2O;
uniform vec4 u_CamPos;
out vec4 fs_CamPos;
const vec4 lightDir = normalize(vec4(0.0, 1.f, 0.0, 0));
mat4 rotationMatrix(vec3 axis, float angle) {
    axis = normalize(axis);
    float s = sin(angle);
    float c = cos(angle);
    float oc = 1.0 - c;
    return mat4(oc * axis.x * axis.x + c,           oc * axis.x * axis.y - axis.z * s,  oc * axis.z * axis.x + axis.y * s,  0.0,
                oc * axis.x * axis.y + axis.z * s,  oc * axis.y * axis.y + c,           oc * axis.y * axis.z - axis.x * s,  0.0,
                oc * axis.z * axis.x - axis.y * s,  oc * axis.y * axis.z + axis.x * s,  oc * axis.z * axis.z + c,           0.0,
                0.0,                                0.0,                                0.0,                                1.0);
}
vec4 rotateLightVec(float deg, vec4 LV) {
    mat4 R = rotationMatrix(vec3(0,0,1), deg);
    return R * LV;
}
float random1(vec3 p) {
    return fract(sin(dot(p, vec3(127.1, 311.7, 191.999))) * 43758.5453);
}
vec3 random2(vec3 p) {
    return fract(sin(vec3(dot(p, vec3(127.1, 311.7, 58.24)),
                          dot(p, vec3(269.5, 183.3, 657.3)),
                          dot(p, vec3(420.69, 69.420, 469.20)))) * 43758.5453);
}
void main()
{
    fs_Col = vs_Col;
    fs_UVs = vs_UV;
    fs_Anim = vs_Anim;
    fs_T2O = vs_T2O;
    mat3 invTranspose = mat3(u_ModelInvTr);
    fs_Nor = vec4(invTranspose * vec3(vs_Nor), 0);
    vec4 modelposition = u_Model * vs_Pos;
    if (vs_Anim != 0) { // if we want to animate this surface
        // check region in texture to decide which animatable type is drawn
        bool lava = fs_UVs.x >= 13.f/16.f && fs_UVs.y < 2.f/16.f;
        bool water = !lava && fs_UVs.x >= 13.f/16.f && fs_UVs.y <= 4.f/16.f;
        if (water) {
            // define an oscillating time so that the model can transition back and forth
            float t = (cos(u_Time * 0.05) + 1)/2; // u_Time increments by 1 every frame; t oscillates in [0, 1]
            vec3 temp = random2(vec3(modelposition.x, modelposition.y, modelposition.z)); // range [0, 1]
            temp = (temp - 0.5)/25; // now in [-0.02, 0.02]
            modelposition.x = mix(modelposition.x - temp.x, modelposition.x + temp.x, t);
            modelposition.y = mix(modelposition.y - temp.y, modelposition.y + 3*temp.y, t);
            modelposition.z = mix(modelposition.z - temp.z, modelposition.z + temp.z, t);
        } else if (lava) {
            // same oscillation, but slower for lava
            float t = (cos(u_Time * 0.01) + 1)/2;
            vec3 temp = random2(vec3(modelposition.x, modelposition.y, modelposition.z)); // range [0, 1]
            temp = (temp - 0.5)/25; // now in [-0.02, 0.02]
            modelposition.x = mix(modelposition.x - temp.x, modelposition.x + temp.x, t);
            modelposition.y = mix(modelposition.y - temp.y, modelposition.y + 3*temp.y, t);
            modelposition.z = mix(modelposition.z - temp.z, modelposition.z + temp.z, t);
        }
    }
    fs_dimVal = random1(modelposition.xyz/100.f);
    fs_LightVec = rotateLightVec(0.001 * u_Time, lightDir); // compute the direction in which the light source lies
    fs_CamPos = u_CamPos; // uniform handle for the camera position instead of the inverse
    fs_Pos = modelposition;
    // gl_Position is a built-in variable of OpenGL which is used to render
    // the final positions of the geometry's vertices
    gl_Position = u_ViewProj * modelposition;
}
fragment shader:
#version 330
uniform vec4 u_Color; // The color with which to render this instance of geometry.
uniform sampler2D textureSampler;
uniform int u_Time;
uniform mat4 u_ViewProj;
uniform mat4 u_Model;
in vec4 fs_Pos;
in vec4 fs_Nor;
in vec4 fs_LightVec;
in vec4 fs_Col;
in vec2 fs_UVs;
in float fs_Anim;
in float fs_T2O;
in float fs_dimVal;
out vec4 out_Col;
in vec4 fs_CamPos;
float random1(vec3 p) {
    return fract(sin(dot(p, vec3(127.1, 311.7, 191.999))) * 43758.5453);
}
float random1b(vec3 p) {
    return fract(sin(dot(p, vec3(169.1, 355.7, 195.999))) * 95751.5453);
}
float mySmoothStep(float a, float b, float t) {
    t = smoothstep(0, 1, t);
    return mix(a, b, t);
}
float cubicTriMix(vec3 p) {
    vec3 pFract = fract(p);
    float llb = random1(floor(p) + vec3(0,0,0));
    float lrb = random1(floor(p) + vec3(1,0,0));
    float ulb = random1(floor(p) + vec3(0,1,0));
    float urb = random1(floor(p) + vec3(1,1,0));
    float llf = random1(floor(p) + vec3(0,0,1));
    float lrf = random1(floor(p) + vec3(1,0,1));
    float ulf = random1(floor(p) + vec3(0,1,1));
    float urf = random1(floor(p) + vec3(1,1,1));
    float mixLoBack  = mySmoothStep(llb, lrb, pFract.x);
    float mixHiBack  = mySmoothStep(ulb, urb, pFract.x);
    float mixLoFront = mySmoothStep(llf, lrf, pFract.x);
    float mixHiFront = mySmoothStep(ulf, urf, pFract.x);
    float mixLo = mySmoothStep(mixLoBack, mixLoFront, pFract.z);
    float mixHi = mySmoothStep(mixHiBack, mixHiFront, pFract.z);
    return mySmoothStep(mixLo, mixHi, pFract.y);
}
float fbm(vec3 p) {
    float amp = 0.5;
    float freq = 4.0;
    float sum = 0.0;
    for (int i = 0; i < 8; i++) {
        sum += cubicTriMix(p * freq) * amp;
        amp *= 0.5;
        freq *= 2.0;
    }
    return sum;
}
void main()
{
    vec4 diffuseColor = texture(textureSampler, fs_UVs);
    bool apply_lambert = true;
    float specularIntensity = 0;
    if (fs_Anim != 0) {
        // check region in texture to decide which animatable type is drawn
        bool lava = fs_UVs.x >= 13.f/16.f && fs_UVs.y < 2.f/16.f;
        bool water = !lava && fs_UVs.x >= 13.f/16.f && fs_UVs.y < 4.f/16.f;
        if (lava) {
            // slowly gyrate texture and lighten and darken with random dimVal from vert shader
            vec2 movingUVs = vec2(fs_UVs.x + fs_Anim * 0.065/16 * sin(0.01*u_Time),
                                  fs_UVs.y - fs_Anim * 0.065/16 * sin(0.01*u_Time + 3.14159/2));
            diffuseColor = texture(textureSampler, movingUVs);
            vec4 warmerColor = diffuseColor + vec4(0.3, 0.3, 0, 0);
            vec4 coolerColor = diffuseColor - vec4(0.1, 0.1, 0, 0);
            diffuseColor = mix(warmerColor, coolerColor, 0.5 + fs_dimVal * 0.65*sin(0.02*u_Time));
            apply_lambert = false;
        } else if (water) {
            // blend between 3 different points in texture to create a wavy subtle change over time
            vec2 offsetUVs = vec2(fs_UVs.x - 0.5f/16.f, fs_UVs.y - 0.5f/16.f);
            diffuseColor = texture(textureSampler, fs_UVs);
            vec4 altColor = texture(textureSampler, offsetUVs);
            altColor.x += fs_dimVal * pow(altColor.x + .15, 5);
            altColor.y += fs_dimVal * pow(altColor.y + .15, 5);
            altColor.z += 0.5 * fs_dimVal * pow(altColor.z + .15, 5);
            diffuseColor = mix(diffuseColor, altColor, 0.5 + 0.35*sin(0.05*u_Time));
            offsetUVs -= 0.25f/16.f;
            vec4 newColor = texture(textureSampler, offsetUVs);
            diffuseColor = mix(diffuseColor, newColor, 0.5 + 0.5*sin(0.025*u_Time)) + fs_dimVal * vec4(0.025);
            diffuseColor.a = 0.7;
            // ----------------------------------------------------
            // Blinn-Phong Shading
            // ----------------------------------------------------
            vec4 lightDir = normalize(fs_LightVec - fs_Pos);
            vec4 viewDir = normalize(fs_CamPos - fs_Pos);
            vec4 halfVec = normalize(lightDir + viewDir);
            float shininess = 400.f;
            // assign to the outer specularIntensity; redeclaring it here would
            // shadow the variable above and the specular term would never be applied
            specularIntensity = max(pow(dot(halfVec, normalize(fs_Nor)), shininess), 0);
        }
    }
    // Calculate the diffuse term for Lambert shading
    float diffuseTerm = dot(normalize(fs_Nor), normalize(fs_LightVec));
    // Avoid negative lighting values
    diffuseTerm = clamp(diffuseTerm, 0, 1);
    float ambientTerm = 0.3;
    // Add a small float value to the color multiplier to simulate ambient lighting,
    // so that faces that are not lit by our point light are not completely black.
    float lightIntensity = diffuseTerm + ambientTerm;
    vec3 col = diffuseColor.rgb;
    // Compute final shaded color
    if (apply_lambert) {
        col = col * lightIntensity + col * specularIntensity;
    }
    // Check the rare, special case where we draw a face between two different transparent blocks as opaque
    if (fs_T2O != 0) {
        out_Col = vec4(col, 1.f);
    } else {
        out_Col = vec4(col, diffuseColor.a);
    }
    // distance fog!
    vec4 fogColor = vec4(0.6, 0.75, 0.9, 1.0);
    float FC = gl_FragCoord.z / gl_FragCoord.w / 124.f;
    float falloff = clamp(1.05 - exp(-1.05f * (FC - 0.9f)), 0.f, 1.f);
    out_Col = mix(out_Col, fogColor, falloff);
}
I tried implementing Blinn-Phong in the fragment shader, but I think it is wrong simply from the wrong normals. I think this can be done with some sort of tangent and cross product solution, but how can I know the tangent of the surface given that we only know the vertex positions?
I am not using Unity, just bare C++, and most of the answers I am finding online are for Java or Unity, which I do not understand.
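A common approach for procedurally displaced surfaces is to rebuild the normal numerically: displace two nearby points along the surface tangents and take the cross product of the finite-difference vectors. A minimal C++ sketch, assuming GLM and a displace() function that mirrors the shader's noise displacement (both names are hypothetical, not from the question):

#include <glm/glm.hpp>

// Hypothetical: must apply the same displacement as the vertex shader.
glm::vec3 displace(const glm::vec3& p);

// Approximate the normal at p from finite-difference tangents.
glm::vec3 deformedNormal(const glm::vec3& p,
                         const glm::vec3& tangent,
                         const glm::vec3& bitangent,
                         float eps = 0.01f)
{
    glm::vec3 p0 = displace(p);
    glm::vec3 px = displace(p + eps * tangent);
    glm::vec3 py = displace(p + eps * bitangent);
    return glm::normalize(glm::cross(px - p0, py - p0));
}

For a mostly flat water surface, the world-space x and z axes are a reasonable choice of tangent and bitangent.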

Volumetric Light not rendering volume correctly

I've been following this tutorial (https://www.programmersought.com/article/68075912719/) to get volumetric light, but I am not getting the correct output.
The shadow volume is rendered incorrectly and I am not sure what I am doing wrong. My vertex and fragment shaders look exactly like the tutorial's, but I'm still not getting the correct output.
Here is the vertex shader code:
#version 450 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec2 texCoord;
layout (location = 2) in vec3 normal;
uniform mat4 model;
uniform mat4 vp;
uniform float zFar;
uniform float fov;
uniform float aspectRatio;
out vec2 TexCoord;
out vec2 farPlanePos;
void main(void){
    gl_Position = vec4(position, 1.0);
    TexCoord = vec2(texCoord.x, 1 - texCoord.y);
    float t = tan(fov/2);
    farPlanePos.x = (TexCoord.x * 2 - 1) * zFar * t * aspectRatio;
    farPlanePos.y = (TexCoord.y * 2 - 1) * zFar * t;
}
And here is the fragment shader code:
#version 450 core
in vec2 TexCoord;
uniform vec3 cameraPos;
uniform vec3 lightPos;
uniform vec3 lightColor;
uniform mat4 invViewMatrix;
uniform mat4 invProjectionMatrix;
uniform float ambientStrength;
uniform sampler2D gPosition;
uniform sampler2D gNormal;
uniform sampler2D gAlbedoSpec;
uniform sampler2D gDepth;
uniform sampler2D shadowMapTexture;
in vec2 farPlanePos;
uniform float zFar;
uniform float zNear;
float g = 0.0f;
uniform mat4 lightViewMatrix;
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
vec3 yellow_light = vec3(1,198.0/255.0,107.0/255.0);
out vec4 finalColor;
// use linear z depth
vec3 ComputeWorldPos(float depth){
    vec4 pos = vec4(vec3(farPlanePos.x, farPlanePos.y, -zFar) * depth, 1.0f);
    vec4 ret = invViewMatrix * pos;
    return ret.xyz / ret.w;
}
bool IsInShadow(vec4 worldPos){
    float fShadow = 0.0;
    vec4 lightPos = (lightViewMatrix * (worldPos));
    float fDistance = -lightPos.z / zFar;
    lightPos = projectionMatrix * lightPos;
    vec2 uv = lightPos.xy / lightPos.w * 0.5 + vec2(0.5f, 0.5f);
    uv.x = clamp(uv.x, 0.0f, 1.0f);
    uv.y = clamp(uv.y, 0.0f, 1.0f);
    float offset = 0.5f/zFar;
    float distanceMap = texture2D(shadowMapTexture, uv).r;
    return fDistance - offset > distanceMap;
}
void main(void){
    float depth = texture2D(gDepth, TexCoord).w;
    vec3 total_light = vec3(0.0f); // initialize; reading an uninitialized variable is undefined
    // volume light
    {
        float I = 0.0f;
        float d = depth * zFar;
        int virtual_plane_num = 100;
        int begin = int(virtual_plane_num * zNear / (d - zNear));
        int end = int(virtual_plane_num * (zFar - d) / (d - zNear));
        for(int j = begin; j <= virtual_plane_num + begin; j++)
        {
            float z = 1.0f * j / (begin + virtual_plane_num + end);
            vec3 pos = ComputeWorldPos(z);
            if(z < depth && !IsInShadow(vec4(pos, 1.0f)))
            {
                //vec3 lightDis = pos - lightPos;
                //vec3 viewDis = pos - cameraPos;
                //float lightDis2 = lightDis.x * lightDis.x + lightDis.y * lightDis.y + lightDis.z * lightDis.z;
                vec3 lightDir = normalize(pos - lightPos);
                vec3 viewDir = normalize(pos - cameraPos);
                float cosTheta = dot(lightDir, normalize(-lightPos));
                float hg = 1.0f/(4.0f*3.14f) * (1.0f - g*g) / pow(1.0f + g*g - 2.0f*g*dot(lightDir, -viewDir), 1.5f);
                if(cosTheta > 0.9){
                    I += clamp(10 * hg / virtual_plane_num, 0.0f, 1.0f);
                }
            }
        }
        I = clamp(I, 0.0f, 1.0f);
        total_light += I * yellow_light;
    }
    vec3 normal = texture2D(gNormal, TexCoord).xyz * 2 - 1;
    vec3 worldPos = ComputeWorldPos(depth);
    // parallel lights
    /*
    {
        vec3 ViewDir = normalize(cameraPos - worldPos);
        vec3 lightDir = normalize(vec3(0.5, 1, 0.2));
        vec3 halfDir = normalize(lightDir + ViewDir);
        float diffuse = 0.3 * clamp(dot(normal, lightDir), 0, 1);
        vec3 reflectDir = normalize(reflect(-lightDir, normal));
        float specular = 0.3 * pow(clamp(dot(reflectDir, halfDir), 0, 1), 50.0);
        vec3 color = (diffuse + specular) * vec3(1, 1, 1);
        total_light += color;
    }
    */
    vec3 color = vec3(texture2D(gAlbedoSpec, TexCoord));
    float ambient = 0.1;
    finalColor = vec4(total_light + ambient * color, 1);
}
So you can see the vertex and fragment shader code is exactly like the blog's, but the output is still different.
Unfortunately the blog doesn't say how to contact the author, otherwise I would have asked them directly. So the next best option is Stack Overflow, and that's why I am asking here.
OK, after 2 days I was able to fix the issue. I think the IsInShadow function was incorrect: it always perspective-divides by zFar and also doesn't multiply by the projection matrix before reading the current depth.
So I replaced the code with the LearnOpenGL shadow calculation, as below:
bool IsInShadow(vec4 worldPos){
    vec4 lightPos = (lightViewMatrix * (worldPos));
    //float fDistance = -lightPos.z / zFar;
    lightPos = projectionMatrix * lightPos;
    vec3 projCoords = lightPos.xyz / lightPos.w;
    projCoords = projCoords * 0.5 + 0.5f;
    //uv.x = clamp(uv.x, 0.0f, 1.0f);
    //uv.y = clamp(uv.y, 0.0f, 1.0f);
    float offset = 0.5f/zFar;
    float distanceMap = texture2D(shadowMapTexture, projCoords.xy).r;
    return projCoords.z - offset > distanceMap;
}
And now the code works!!
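Note that this comparison only works if lightViewMatrix and projectionMatrix are the very same matrices that were used when rendering the shadow map. A GLM-based sketch of that host-side upload, under assumed names (shaderID, lightPos, lightTarget are illustrations, not from the tutorial):

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>

// Build the light's view and projection once; use them for both the
// shadow-map pass and this lighting pass.
glm::mat4 lightView = glm::lookAt(lightPos, lightTarget,
                                  glm::vec3(0.0f, 1.0f, 0.0f));
glm::mat4 lightProj = glm::perspective(glm::radians(90.0f), // spotlight-style cone
                                       1.0f, zNear, zFar);

glUniformMatrix4fv(glGetUniformLocation(shaderID, "lightViewMatrix"),
                   1, GL_FALSE, glm::value_ptr(lightView));
glUniformMatrix4fv(glGetUniformLocation(shaderID, "projectionMatrix"),
                   1, GL_FALSE, glm::value_ptr(lightProj));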

OpenGL GLSL - Projection matrix not working

I've got a basic OpenGL application and I want to use my projection matrix.
This is my matrix:
WorldCoordinates.m[0][0] = 2.0f / Width - 1.0f; WorldCoordinates.m[0][1] = 0; WorldCoordinates.m[0][2] = 0; WorldCoordinates.m[0][3] = 0;
WorldCoordinates.m[1][0] = 0; WorldCoordinates.m[1][1] = 2.0f / Height - 1.0f; WorldCoordinates.m[1][2] = 0; WorldCoordinates.m[1][3] = 0;
WorldCoordinates.m[2][0] = 0; WorldCoordinates.m[2][1] = 0; WorldCoordinates.m[2][2] = 0; WorldCoordinates.m[2][3] = 0;
WorldCoordinates.m[3][0] = 0; WorldCoordinates.m[3][1] = 0; WorldCoordinates.m[3][2] = 0; WorldCoordinates.m[3][3] = 0;
(WorldCoordinates is a Matrix4 struct containing just one member, m, of type float[4][4]; Width and Height are two ints.)
I then pass this matrix to my vertex shader like this:
shader.Bind();
glUniformMatrix4fv(glGetUniformLocation(shader.GetProgramID(), "worldCoordinates"), 1, GL_TRUE, &WorldCoordinates.m[0][0]);
(Shader is a class whose Bind() method just calls glUseProgram.)
This is my vertex shader GLSL:
#version 330 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec3 color;
layout (location = 2) in vec2 texCoord;
out vec3 Color;
out vec2 TexCoord;
uniform mat4 worldCoordinates;
void main()
{
    gl_Position = worldCoordinates * vec4(position, 1.0f);
    Color = color;
    TexCoord = texCoord;
}
Using this, it doesn't work. But changing the gl_Position call to this:
gl_Position = vec4(vec3(position.x * 1/400 -1, position.y * 1/300 -1, 1.0f), 1.0f);
it renders as expected. Why is that?
This is how you build an orthographic projection matrix:
static void
mat4_ortho(mat4_t m, float left, float right, float bottom, float top, float near, float far)
{
    float rl = right - left;
    float tb = top - bottom;
    float fn = far - near;
    mat4_zero(m);
    m[ 0] =  2.0f / rl;
    m[ 5] =  2.0f / tb;
    m[10] = -2.0f / fn;
    m[12] = -(left + right) / rl;
    m[13] = -(top + bottom) / tb;
    m[14] = -(far + near) / fn;
    m[15] = 1.0f;
}
For your case, you'd set left = 0, right = width, bottom = 0, top = height; near and far don't matter much here, just set -1.0 and 1.0, for instance.
With such a matrix, the vertex coordinates you use for drawing will map 1:1 with the pixels on screen.
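As a usage sketch under the asker's setup (assuming mat4_t is a plain float[16]; note the matrix above is column-major, with the translation in m[12..14], so it is uploaded without transposing, unlike the GL_TRUE in the question):

typedef float mat4_t[16];

mat4_t ortho;
mat4_ortho(ortho, 0.0f, (float)Width, 0.0f, (float)Height, -1.0f, 1.0f);

shader.Bind();
// Column-major data, so transpose = GL_FALSE.
glUniformMatrix4fv(glGetUniformLocation(shader.GetProgramID(), "worldCoordinates"),
                   1, GL_FALSE, ortho);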

LWJGL Objects not shown on ATI graphics card

I've written an LWJGL application that uses .obj files, reads them, and displays them (using display lists).
On my NVIDIA graphics card everything runs fine, but on an AMD graphics card I can't see the objects.
How I pass data to the shaders:
glUseProgram(shaderEngine.obj);
glUniform1i(glGetUniformLocation(shaderEngine.obj, "inOrangeJuice"), inOrangeJuice ? 1 : 0);
shaderEngine.loadMatrix(glGetUniformLocation(shaderEngine.standard, "projectionMatrix"), camera.projectionMatrix);
shaderEngine.loadMatrix(glGetUniformLocation(shaderEngine.obj, "viewMatrix"), camera.viewMatrix);
ModelMatrix is loaded:
shaderEngine.createModelMatrix(new Vector3f(x, y, z), new Vector3f(rx, ry, rz), new Vector3f(1, 1, 1));
shaderEngine.loadModelMatrix(shaderEngine.obj);
Fragment Shader:
#version 130
uniform sampler2D tex;
uniform vec2 texCoord[4];
float textureSize;
float texelSize;
uniform int inOrangeJuice;
bool pointInTriangle(vec3 P, vec3 A, vec3 B, vec3 C)
{
    vec3 u = B - A;
    vec3 v = C - A;
    vec3 w = P - A;
    vec3 vCrossW = cross(v, w);
    vec3 vCrossU = cross(v, u);
    if (dot(vCrossW, vCrossU) < 0)
    {
        return false;
    }
    vec3 uCrossW = cross(u, w);
    vec3 uCrossV = cross(u, v);
    if (dot(uCrossW, uCrossV) < 0)
    {
        return false;
    }
    float denom = length(uCrossV);
    float r = length(vCrossW);
    float t = length(uCrossW);
    return (r + t <= 1);
}
vec4 texture2DBilinear(sampler2D textureSampler, vec2 uv)
{
    vec4 tl = texture2D(textureSampler, uv);
    vec4 tr = texture2D(textureSampler, uv + vec2(texelSize, 0));
    vec4 bl = texture2D(textureSampler, uv + vec2(0, texelSize));
    vec4 br = texture2D(textureSampler, uv + vec2(texelSize, texelSize));
    vec2 f = fract(uv.xy * textureSize);
    vec4 tA = mix(tl, tr, f.x);
    vec4 tB = mix(bl, br, f.x);
    return mix(tA, tB, f.y);
}
void main()
{
    ivec2 textureSize2d = textureSize(tex, 0);
    textureSize = float(textureSize2d.x);
    texelSize = 1.0 / textureSize;
    // texture coordinate:
    vec2 texCoord = (gl_TexCoord[0].st);
    bool inOJ = false;
    if (inOrangeJuice == 1)
    {
        float depth = gl_FragCoord.z / gl_FragCoord.w; // works only with perspective projection
        depth = depth / 6;
        if (depth > 1)
        {
            depth = 1;
        }
        inOJ = true;
        gl_FragColor = texture2DBilinear(tex, texCoord) * gl_Color * (1.0 - depth) + vec4(1.0, 0.5, 0.0, 1.0) * depth;
    }
    if (inOJ == false)
    {
        gl_FragColor = texture2DBilinear(tex, texCoord) * gl_Color;
    }
    // Nothing is shown, inOrangeJuice should be 0
    //gl_FragColor = vec4(inOrangeJuice, 0, 0, 1);
    // Always works:
    //gl_FragColor = texture2D(tex, texCoord) * gl_Color;
}
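As an aside, texture2DBilinear above is hand-rolled bilinear filtering: four neighboring taps blended by the fractional texel position. The same math in plain C++, as an illustration only (sample() is a hypothetical nearest-neighbor fetch, not part of the question's code):

#include <cmath>

struct Vec4 { float r, g, b, a; };

// Linear interpolation between two colors, like GLSL's mix().
static Vec4 mix(const Vec4& x, const Vec4& y, float t) {
    return { x.r + (y.r - x.r) * t, x.g + (y.g - x.g) * t,
             x.b + (y.b - x.b) * t, x.a + (y.a - x.a) * t };
}

Vec4 sample(float u, float v); // hypothetical nearest-neighbor fetch

Vec4 bilinear(float u, float v, float textureSize) {
    float texelSize = 1.0f / textureSize;
    Vec4 tl = sample(u, v);
    Vec4 tr = sample(u + texelSize, v);
    Vec4 bl = sample(u, v + texelSize);
    Vec4 br = sample(u + texelSize, v + texelSize);
    float fx = u * textureSize - std::floor(u * textureSize); // fract
    float fy = v * textureSize - std::floor(v * textureSize);
    return mix(mix(tl, tr, fx), mix(bl, br, fx), fy);
}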

Problems with drawing billboards

I am currently trying to draw billboards and some geometry with the "modern OpenGL approach". The problem is that I cannot force the billboards to keep their positions in space.
I need to link text positions with the positions of other objects. The text position is (3, 3, 3), and the end of the black line has the same coordinates. In some views I have exactly what I need: the text is drawn at the end of the line. But in others it is too far from the end of the line.
My render code:
public void Draw()
{
    //Set up matrices
    projectionMatrix = Matrix4.CreateOrthographic(_width, _height, -10000, 10000);
    modelMatrix = Matrix4.Identity;
    viewMatrix = Matrix4.CreateRotationY((float)xrot) *
                 Matrix4.CreateRotationX((float)yrot) *
                 Matrix4.CreateScale((float)scale);
    var viewPort = new Rectangle(-(_width / 2), -(_height / 2), _width, _height);
    var viewportTransformationMatrix = ComputeViewportTransformationMatrix(viewPort, -100, 100);
    var viewportOrthographicMatrix = ComputeViewportOrthographicMatrix(viewPort);
    worldViewProj = modelMatrix * viewMatrix * projectionMatrix;

    //DRAW AXISES
    GL.UseProgram(axisesProgramID);
    axisesProgram.Uniforms["worldViewProj"].SetValue(worldViewProj);
    axisesVAO.Bind();
    for (int i = 0; i < 4; i++)
    {
        GL.DrawArrays(PrimitiveType.Lines, i * 2, 2);
    }

    //DRAW TEXT WITH PRE-CREATED TEXTURE
    GL.UseProgram(textProgramID);
    GL.ActiveTexture(TextureUnit.Texture0);
    GL.BindTexture(TextureTarget.Texture2D, textureID);
    //set-up uniforms
    textProgram.Uniforms["og_viewportOrthographicMatrix"].SetValue(viewportOrthographicMatrix);
    textProgram.Uniforms["og_viewportTransformationMatrix"].SetValue(viewportTransformationMatrix);
    textProgram.Uniforms["Position"].SetValue(new float[] { 3.0f, 3.0f, 3.0f });
    textProgram.Uniforms["projectionMatrix"].SetValue(projectionMatrix);
    textProgram.Uniforms["modelViewMatrix"].SetValue(modelViewMatrix);
    textProgram.Uniforms["og_texture0"].SetValue(0);
    GL.DrawArrays(PrimitiveType.Points, 0, 1);
    GL.BindTexture(TextureTarget.Texture2D, 0);
}

public Matrix4 ComputeViewportTransformationMatrix(Rectangle viewport, float nearDepthRange, float farDepthRange)
{
    double halfWidth = viewport.Width * 0.5;
    double halfHeight = viewport.Height * 0.5;
    double halfDepth = (farDepthRange - nearDepthRange) * 0.5;
    //
    // Bottom and top swapped: MS -> OpenGL
    //
    return new Matrix4(
        (float)halfWidth, 0.0f, 0.0f, (float)viewport.Left + (float)halfWidth,
        0.0f, (float)halfHeight, 0.0f, (float)viewport.Top + (float)halfHeight,
        0.0f, 0.0f, (float)halfDepth, (float)nearDepthRange + (float)halfDepth,
        0.0f, 0.0f, 0.0f, 1.0f);
}

public static Matrix4 ComputeViewportOrthographicMatrix(Rectangle viewport)
{
    //
    // Bottom and top swapped: MS -> OpenGL
    //
    return Matrix4.CreateOrthographicOffCenter(
        (float)viewport.Left, (float)viewport.Right,
        (float)viewport.Top, (float)viewport.Bottom,
        0.0f, 1.0f);
}
My axes shaders are a really simple pass-through.
//VERTEX SHADER
#version 150 core
in vec3 in_Position;
in vec3 in_Color;
out vec4 color;
uniform mat4 worldViewProj;
void main(void) {
    gl_Position = worldViewProj * vec4(in_Position, 1.0);
    color = vec4(in_Color, 1.0f);
}
//FRAGMENT SHADER
#version 150 core
in vec4 color;
out vec4 out_Color;
void main(void)
{
    out_Color = color;
}
Here are the text (texture) shaders:
//VERTEX SHADER
#version 330
out float gsOrigin;
out vec2 gsPixelOffset;
uniform mat4 projectionMatrix;
uniform mat4 modelViewMatrix;
uniform mat4 og_viewportTransformationMatrix;
uniform float origin = 6; // TODO: Why does this not work when float is int?
uniform vec2 pixelOffset = vec2(0,0);
uniform vec3 Position;
vec4 ModelToWindowCoordinates(
    vec4 v,
    mat4 modelViewPerspectiveMatrix,
    mat4 viewportTransformationMatrix)
{
    v = modelViewPerspectiveMatrix * v; // clip coordinates
    v.xyz /= v.w; // normalized device coordinates
    v.xyz = (viewportTransformationMatrix * vec4(v.xyz, 1.0)).xyz; // window coordinates
    return v;
}
void main()
{
    gl_Position = ModelToWindowCoordinates(vec4(Position, 1.0f), modelViewMatrix * projectionMatrix, og_viewportTransformationMatrix);
    gsOrigin = origin;
    gsPixelOffset = pixelOffset;
}
//GEOMETRY SHADER
#version 330
layout(points) in;
layout(triangle_strip, max_vertices = 4) out;
in float gsOrigin[];
in vec2 gsPixelOffset[];
out vec2 fsTextureCoordinates;
uniform sampler2D og_texture0;
uniform float og_highResolutionSnapScale;
uniform mat4 og_viewportOrthographicMatrix;
void main()
{
    float originScales[3] = float[](0.0, 1.0, -1.0);
    vec2 halfSize = vec2(textureSize(og_texture0, 0)) * 0.5 * og_highResolutionSnapScale;
    vec4 center = gl_in[0].gl_Position;
    int horizontalOrigin = int(gsOrigin[0]) & 3;        // bits 0-1
    int verticalOrigin = (int(gsOrigin[0]) & 12) >> 2;  // bits 2-3
    center.xy += (vec2(originScales[horizontalOrigin], originScales[verticalOrigin]) * halfSize);
    center.xy += (gsPixelOffset[0] * og_highResolutionSnapScale);
    vec4 v0 = vec4(center.xy - halfSize, 0, 1.0);
    vec4 v1 = vec4(center.xy + vec2(halfSize.x, -halfSize.y), 0, 1.0);
    vec4 v2 = vec4(center.xy + vec2(-halfSize.x, halfSize.y), 0, 1.0);
    vec4 v3 = vec4(center.xy + halfSize, 0, 1.0);
    gl_Position = og_viewportOrthographicMatrix * v0;
    fsTextureCoordinates = vec2(0.0, 0.0);
    EmitVertex();
    gl_Position = og_viewportOrthographicMatrix * v1;
    fsTextureCoordinates = vec2(1.0, 0.0);
    EmitVertex();
    gl_Position = og_viewportOrthographicMatrix * v2;
    fsTextureCoordinates = vec2(0.0, 1.0);
    EmitVertex();
    gl_Position = og_viewportOrthographicMatrix * v3;
    fsTextureCoordinates = vec2(1.0, 1.0);
    EmitVertex();
}
//FRAGMENT SHADER
#version 330
in vec2 fsTextureCoordinates;
out vec4 fragmentColor;
uniform sampler2D og_texture0;
uniform vec3 u_color;
void main()
{
    vec4 color = texture(og_texture0, fsTextureCoordinates);
    if (color.a == 0.0)
    {
        discard;
    }
    fragmentColor = vec4(color.rgb * u_color.rgb, color.a);
}
To me it looks like there is some basic coordinate-system confusion. I have not checked everything here, but to me,
worldViewProj = modelMatrix * viewMatrix * projectionMatrix;
looks like the wrong way round, as vertices should be multiplied from the right, like
projection * view * model * vertex
The same issue appears in your shaders.
Also, I am not entirely sure, but it seems you are computing pixel coordinates for gl_Position in the shader (as you are applying some viewport transform in the function ModelToWindowCoordinates). Since pixel coordinates may range from, e.g., (0, 0) to (1920, 1080), they are not correct for gl_Position, which should be in clip coordinates.
I think you should read up on a good tutorial about 3D billboarding and the math; for example,
this one looks quite interesting. Then modify the sample code to fit your needs step by step.
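To make the column-vector convention concrete, here is a small GLM-based sketch (GLM follows the same convention as GLSL; note that OpenTK's Matrix4 uses row vectors, where the reversed order model * view * projection is the documented convention, so check which one your math library expects):

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

glm::mat4 model = glm::mat4(1.0f);
glm::mat4 view  = glm::lookAt(glm::vec3(0.0f, 0.0f, 5.0f),
                              glm::vec3(0.0f), glm::vec3(0.0f, 1.0f, 0.0f));
glm::mat4 proj  = glm::perspective(glm::radians(60.0f), 16.0f / 9.0f, 0.1f, 100.0f);

// Column-vector convention: the matrix closest to the vertex applies first,
// so the combined matrix is built right to left.
glm::mat4 mvp = proj * view * model;

// e.g. the text anchor (3, 3, 3) from the question:
glm::vec4 clip = mvp * glm::vec4(3.0f, 3.0f, 3.0f, 1.0f);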