How to Make 2D Lighting Better in OpenGL - C++

I want to ask a question about my lighting effect in OpenGL.
I am trying to add lighting, but I don't think it looks good, and I've seen some 2D lighting pictures that look much better than mine.
Question: I have made a spotlight, but I want it to grow dimmer toward the edge of its range so it looks more like natural light, and I can't figure out how.
I am using an orthographic matrix with (800, 600) as the window size, and I build my meshes with real x, y coordinates. I send my lightPos and my PlayerPos to the fragment shader, and I use the vertex position as the width and height of the mesh so that I can compute lighting for every pixel.
The light is just a basic circle, and I don't know how to make it look better. Here are some images. In the fragment shader, I use the Pythagorean theorem to calculate the distance between the two points.
And here are the vertex and fragment shaders.
Vertex shader
#version 330
layout (location = 0) in vec3 pos;
layout (location = 1) in vec2 tCoord;
uniform mat4 mat;
out vec2 tCoord0;
out vec2 vPos;
void main(){
tCoord0 = vec2(tCoord.x, 1 - tCoord.y);
gl_Position = mat * vec4(pos, 1.0);
vPos = vec2(pos.x, pos.y);
}
Fragment shader
#version 330
out vec4 color;
uniform sampler2D sampler;
in vec2 tCoord0;
uniform vec3 objColor;
uniform vec2 lightPos;
uniform vec2 xyPos;
in vec2 vPos;
void main(){
vec4 textureColor = texture2D(sampler, tCoord0);
vec3 ambientLight = vec3(0.3f, 0.3f, 0.3f);
float dx = lightPos.x - (xyPos.x + vPos.x);
float dy = lightPos.y - (xyPos.y + vPos.y);
float dist = sqrt(dx * dx + dy * dy);
if(dist > 0 && dist < 50){
ambientLight = vec3(0.7f, 0.7f, 0.7f) * 0.6f;
}
else if(dist > 50 && dist < 70){
ambientLight = vec3(0.4f, 0.4f, 0.4f) * 0.6f;
}
else{
discard;
}
if((textureColor.x == 0 && textureColor.y == 0 && textureColor.z == 0) || textureColor.a <= 0){
color = vec4(objColor, 1.0) * vec4(ambientLight, 1.0);
}
else{
color = textureColor * vec4(ambientLight, 1.0) * vec4(objColor, 1.0);
}
}
Drawer.cpp
#include <graphics\shader.h>
#include <graphics\texture.h>
#include <graphics\shape.h>
#include <GL\glew.h>
#include <graphics\light.h>
#include <core\TSAContainer.h>
#include <core\drawer.h>
namespace GE{
namespace core{
std::vector<graphics::GraphicComponent*> Drawer::drawables;
GLuint Drawer::buffer;
void Drawer::init(){
glGenFramebuffers(1, &buffer);
}
std::vector<graphics::GraphicComponent*>& Drawer::getAllGraphicComponents(){
return drawables;
}
void Drawer::addDrawable(graphics::GraphicComponent* drawable){
drawables.push_back(drawable);
}
void Drawer::destroy(){
for (unsigned int i = 0; i < drawables.size(); i++)
delete drawables[i];
drawables.clear();
}
void Drawer::render(){
for (std::vector<graphics::GraphicComponent*>::iterator it = drawables.begin(); it != drawables.end(); it++){
if ((*it)->isDraw()){
(*it)->getShader().bind();
int color = getColor(static_cast<graphics::Shape*>(*it)->getColor());
int r = (color >> 16) & 0xff;
int g = (color >> 8) & 0xff;
int b = (color)& 0xff;
(*it)->getShader().setUniform("mat", (*it)->getTransformation().getTransformationMatrix());
(*it)->getShader().setUniform("objColor", r, g, b);
(*it)->getShader().setUniform("xyPos", (*it)->getTransformation().getPosition());
(*it)->getShader().setUniform("sampler", 1);
if (static_cast<graphics::Shape*>(*it)->getLight() != NULL){
static_cast<graphics::Shape*>(*it)->getLight()->update();
}
//(*it)->getShader().setUniform("ambientLight", static_cast<graphics::Shape*>(*it)->getAmbientLight());
glActiveTexture(GL_TEXTURE1);
if ((*it)->getTexture() != NULL)
(*it)->getTexture()->bind();
(*it)->getMesh().draw();
if ((*it)->getTexture() != NULL)
(*it)->getTexture()->unbind();
(*it)->getShader().unbind();
}
}
}
int Drawer::getColor(colorType color){
int col = 0;
if (color == GE_COLOR_BLUE){
col = 0 << 16 | 0 << 8 | 1;
}
else if (GE_COLOR_GREEN == color){
col = 0 << 16 | 1 << 8 | 0;
}
else if (GE_COLOR_RED == color){
col = 1 << 16 | 0 << 8 | 0;
}
else{
col = 1 << 16 | 1 << 8 | 1;
}
return col;
}
Drawer::Drawer(){
}
Drawer::~Drawer(){
}
}
}

float dx = lightPos.x - (xyPos.x + vPos.x);
float dy = lightPos.y - (xyPos.y + vPos.y);
float dist = sqrt(dx * dx + dy * dy);
if(dist > 0 && dist < 50)
{
ambientLight = vec3(0.7f, 0.7f, 0.7f) * 0.6f;
}
else if(dist > 50 && dist < 70)
{
ambientLight = vec3(0.4f, 0.4f, 0.4f) * 0.6f;
}
Here you're using constant attenuation tiers based on distance. That's what produces the effect of a bright inner circle and a dim outer circle with unnaturally hard edges between them.
If you want a soft gradient effect, you want to avoid the branching and the constants here. We can start with a linear falloff:
float dx = lightPos.x - (xyPos.x + vPos.x);
float dy = lightPos.y - (xyPos.y + vPos.y);
float dist = sqrt(dx * dx + dy * dy);
float max_dist = 70.0f;
float percent = clamp(1.0f - dist / max_dist, 0.0, 1.0f);
ambientLight = vec3(percent, percent, percent);
However, that will probably look kind of ugly to you, with a sharp point around the center. We can square the value to get a smoother curve instead, like so:
...
percent *= percent;
ambientLight = vec3(percent, percent, percent);
To make it kind of "rounder", you can multiply again:
...
percent *= percent * percent;
ambientLight = vec3(percent, percent, percent);
If that's the opposite of what you want visually, you can try sqrt:
float percent = clamp(1.0f - dist / max_dist, 0.0, 1.0f);
percent = sqrt(percent);
Since I don't know exactly what you're after visually, these are some things to try first. Play with them and see if you like what you get.
If you really want maximum control over the effect, a cubic Bézier interpolation might come in handy:
float bezier4(float p1, float p2, float p3, float p4, float t)
{
const float mum1 = 1.0f - t;
const float mum13 = mum1 * mum1 * mum1;
const float mu3 = t * t * t;
return mum13 * p1 + 3 * t * mum1 * mum1 * p2 + 3 * t * t * mum1 * p3 + mu3 * p4;
}
...
float percent = clamp(1.0f - dist / max_dist, 0.0, 1.0f);
// Can play with the first four arguments to achieve the desired effect.
percent = bezier4(0.0f, 0.25f, 0.75f, 1.0f, percent);
ambientLight = vec3(percent, percent, percent);
That will give you a lot of control over the effect, but maybe overkill. Try the other methods first.
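Putting one of these falloffs back into the fragment shader from the question, a minimal sketch might look like the following. Assumptions: the squared falloff variant is used, max_dist = 70.0 is an arbitrary light radius, the built-in distance() replaces the manual Pythagorean computation, and nothing is discarded, so the 0.3 ambient floor from the original still lights fragments outside the light's range:
#version 330
out vec4 color;
uniform sampler2D sampler;
uniform vec3 objColor;
uniform vec2 lightPos;
uniform vec2 xyPos;
in vec2 tCoord0;
in vec2 vPos;
void main(){
    vec4 textureColor = texture(sampler, tCoord0);
    // Distance from this fragment to the light, using the built-in distance().
    float dist = distance(lightPos, xyPos + vPos);
    float max_dist = 70.0;                                  // assumed light radius; tune to taste
    float percent = clamp(1.0 - dist / max_dist, 0.0, 1.0); // linear falloff
    percent *= percent;                                     // square it for a softer gradient
    // Keep the 0.3 ambient floor from the original so unlit areas stay visible instead of being discarded.
    vec3 lightAmount = vec3(max(percent, 0.3));
    if((textureColor.rgb == vec3(0.0)) || textureColor.a <= 0.0){
        color = vec4(objColor, 1.0) * vec4(lightAmount, 1.0);
    }
    else{
        color = textureColor * vec4(lightAmount, 1.0) * vec4(objColor, 1.0);
    }
}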

Related

Compute Normals After Vertex Deformation?

I am coding a vertex and a fragment shader trying to distort the surface of some water and then computing Blinn-Phong lighting on the surface. I am able to successfully compute the deformed positions with a simple noise function, but how can I find the distorted normals? Since it isn't a linear transformation, I am stuck. Could anyone help?
Here are the relevant files:
vertex shader:
#version 150
uniform mat4 u_Model;
uniform mat4 u_ModelInvTr;
uniform mat4 u_ViewProj;
uniform vec4 u_Color;
uniform int u_Time;
in vec4 vs_Pos; // The array of vertex positions passed to the shader
in vec4 vs_Nor; // The array of vertex normals passed to the shader
in vec4 vs_Col; // The array of vertex colors passed to the shader.
in vec2 vs_UV; // UV coords for texture to pass thru to fragment shader
in float vs_Anim; // 0.f or 1.f To pass thru to fragment shader
in float vs_T2O;
out vec4 fs_Pos;
out vec4 fs_Nor;
out vec4 fs_LightVec;
out vec4 fs_Col;
out vec2 fs_UVs;
out float fs_Anim;
out float fs_dimVal;
out float fs_T2O;
uniform vec4 u_CamPos;
out vec4 fs_CamPos;
const vec4 lightDir = normalize(vec4(0.0, 1.f, 0.0, 0));
mat4 rotationMatrix(vec3 axis, float angle) {
axis = normalize(axis);
float s = sin(angle);
float c = cos(angle);
float oc = 1.0 - c;
return mat4(oc * axis.x * axis.x + c, oc * axis.x * axis.y - axis.z * s, oc * axis.z * axis.x + axis.y * s, 0.0, oc * axis.x * axis.y + axis.z * s, oc * axis.y * axis.y + c, oc * axis.y * axis.z - axis.x * s, 0.0,oc * axis.z * axis.x - axis.y * s, oc * axis.y * axis.z + axis.x * s, oc * axis.z * axis.z + c, 0.0, 0.0, 0.0, 0.0, 1.0);
}
vec4 rotateLightVec(float deg, vec4 LV) {
mat4 R = rotationMatrix(vec3(0,0,1), deg);
return R * LV;
}
float random1(vec3 p) {
return fract(sin(dot(p, vec3(127.1, 311.7, 191.999)))*43758.5453);
}
vec3 random2( vec3 p ) {
return fract( sin( vec3(dot(p, vec3(127.1, 311.7, 58.24)), dot(p, vec3(269.5, 183.3, 657.3)), dot(p, vec3(420.69, 69.420, 469.20))) ) * 43758.5453);
}
void main()
{
fs_Col = vs_Col;
fs_UVs = vs_UV;
fs_Anim = vs_Anim;
fs_T2O = vs_T2O;
mat3 invTranspose = mat3(u_ModelInvTr);
fs_Nor = vec4(invTranspose * vec3(vs_Nor), 0);
vec4 modelposition = u_Model * vs_Pos;
if (vs_Anim != 0) { // if we want to animate this surface
// check region in texture to decide which animatable type is drawn
bool lava = fs_UVs.x >= 13.f/16.f && fs_UVs.y < 2.f/16.f;
bool water = !lava && fs_UVs.x >= 13.f/16.f && fs_UVs.y <= 4.f/16.f;
if (water) {
// define an oscillating time so that model can transition back and forth
float t = (cos(u_Time * 0.05) + 1)/2; // u_Time increments by 1 every frame. Domain [0,1]
vec3 temp = random2(vec3(modelposition.x, modelposition.y, modelposition.z)); // range [0, 1]
temp = (temp - 0.5)/25; // [0, 1/scalar]
modelposition.x = mix(modelposition.x - temp.x, modelposition.x + temp.x, t);
modelposition.y = mix(modelposition.y - temp.y, modelposition.y + 3*temp.y, t);
modelposition.z = mix(modelposition.z - temp.z, modelposition.z + temp.z, t);
} else if (lava) {
// define an oscillating time so that model can transition back and forth
float t = (cos(u_Time * 0.01) + 1)/2; // u_Time increments by 1 every frame. Domain [0,1]
vec3 temp = random2(vec3(modelposition.x, modelposition.y, modelposition.z)); // range [0, 1]
temp = (temp - 0.5)/25; // [0, 1/scalar]
modelposition.x = mix(modelposition.x - temp.x, modelposition.x + temp.x, t);
modelposition.y = mix(modelposition.y - temp.y, modelposition.y + 3*temp.y, t);
modelposition.z = mix(modelposition.z - temp.z, modelposition.z + temp.z, t);
}
}
fs_dimVal = random1(modelposition.xyz/100.f);
fs_LightVec = rotateLightVec(0.001 * u_Time, lightDir); // Compute the direction in which the light source lies
fs_CamPos = u_CamPos; // uniform handle for the camera position instead of the inverse
fs_Pos = modelposition;
gl_Position = u_ViewProj * modelposition;// gl_Position is a built-in variable of OpenGL which is
// used to render the final positions of the geometry's vertices
}
fragment shader:
#version 330
uniform vec4 u_Color; // The color with which to render this instance of geometry.
uniform sampler2D textureSampler;
uniform int u_Time;
uniform mat4 u_ViewProj;
uniform mat4 u_Model;
in vec4 fs_Pos;
in vec4 fs_Nor;
in vec4 fs_LightVec;
in vec4 fs_Col;
in vec2 fs_UVs;
in float fs_Anim;
in float fs_T2O;
in float fs_dimVal;
out vec4 out_Col;
in vec4 fs_CamPos;
float random1(vec3 p) {
return fract(sin(dot(p,vec3(127.1, 311.7, 191.999)))
*43758.5453);
}
float random1b(vec3 p) {
return fract(sin(dot(p,vec3(169.1, 355.7, 195.999)))
*95751.5453);
}
float mySmoothStep(float a, float b, float t) {
t = smoothstep(0, 1, t);
return mix(a, b, t);
}
float cubicTriMix(vec3 p) {
vec3 pFract = fract(p);
float llb = random1(floor(p) + vec3(0,0,0));
float lrb = random1(floor(p) + vec3(1,0,0));
float ulb = random1(floor(p) + vec3(0,1,0));
float urb = random1(floor(p) + vec3(1,1,0));
float llf = random1(floor(p) + vec3(0,0,1));
float lrf = random1(floor(p) + vec3(1,0,1));
float ulf = random1(floor(p) + vec3(0,1,1));
float urf = random1(floor(p) + vec3(1,1,1));
float mixLoBack = mySmoothStep(llb, lrb, pFract.x);
float mixHiBack = mySmoothStep(ulb, urb, pFract.x);
float mixLoFront = mySmoothStep(llf, lrf, pFract.x);
float mixHiFront = mySmoothStep(ulf, urf, pFract.x);
float mixLo = mySmoothStep(mixLoBack, mixLoFront, pFract.z);
float mixHi = mySmoothStep(mixHiBack, mixHiFront, pFract.z);
return mySmoothStep(mixLo, mixHi, pFract.y);
}
float fbm(vec3 p) {
float amp = 0.5;
float freq = 4.0;
float sum = 0.0;
for(int i = 0; i < 8; i++) {
sum += cubicTriMix(p * freq) * amp;
amp *= 0.5;
freq *= 2.0;
}
return sum;
}
void main()
{
vec4 diffuseColor = texture(textureSampler, fs_UVs);
bool apply_lambert = true;
float specularIntensity = 0;
if (fs_Anim != 0) {
// check region in texture to decide which animatable type is drawn
bool lava = fs_UVs.x >= 13.f/16.f && fs_UVs.y < 2.f/16.f;
bool water = !lava && fs_UVs.x >= 13.f/16.f && fs_UVs.y < 4.f/16.f;
if (lava) {
// slowly gyrate texture and lighten and darken with random dimVal from vert shader
vec2 movingUVs = vec2(fs_UVs.x + fs_Anim * 0.065/16 * sin(0.01*u_Time),
fs_UVs.y - fs_Anim * 0.065/16 * sin(0.01*u_Time + 3.14159/2));
diffuseColor = texture(textureSampler, movingUVs);
vec4 warmerColor = diffuseColor + vec4(0.3, 0.3, 0, 0);
vec4 coolerColor = diffuseColor - vec4(0.1, 0.1, 0, 0);
diffuseColor = mix(warmerColor, coolerColor, 0.5 + fs_dimVal * 0.65*sin(0.02*u_Time));
apply_lambert = false;
} else if (water) {
// blend between 3 different points in texture to create a wavy subtle change over time
vec2 offsetUVs = vec2(fs_UVs.x - 0.5f/16.f, fs_UVs.y - 0.5f/16.f);
diffuseColor = texture(textureSampler, fs_UVs);
vec4 altColor = texture(textureSampler, offsetUVs);
altColor.x += fs_dimVal * pow(altColor.x+.15, 5);
altColor.y += fs_dimVal * pow(altColor.y+.15, 5);
altColor.z += 0.5 * fs_dimVal * pow(altColor.z+.15, 5);
diffuseColor = mix(diffuseColor, altColor, 0.5 + 0.35*sin(0.05*u_Time));
offsetUVs -= 0.25f/16.f;
vec4 newColor = texture(textureSampler, offsetUVs);
diffuseColor = mix(diffuseColor, newColor, 0.5 + 0.5*sin(0.025*u_Time)) + fs_dimVal * vec4(0.025);
diffuseColor.a = 0.7;
// ----------------------------------------------------
// Blinn-Phong Shading
// ----------------------------------------------------
vec4 lightDir = normalize(fs_LightVec - fs_Pos);
vec4 viewDir = normalize(fs_CamPos - fs_Pos);
vec4 halfVec = normalize(lightDir + viewDir);
float shininess = 400.f;
float specularIntensity = max(pow(dot(halfVec, normalize(fs_Nor)), shininess), 0);
}
}
// Calculate the diffuse term for Lambert shading
float diffuseTerm = dot(normalize(fs_Nor), normalize(fs_LightVec));
// Avoid negative lighting values
diffuseTerm = clamp(diffuseTerm, 0, 1);
float ambientTerm = 0.3;
float lightIntensity = diffuseTerm + ambientTerm; //Add a small float value to the color multiplier
//to simulate ambient lighting. This ensures that faces that are not
//lit by our point light are not completely black.
vec3 col = diffuseColor.rgb;
// Compute final shaded color
if (apply_lambert) {
col = col * lightIntensity + col * specularIntensity;
}
// & Check the rare, special case where we draw face between two diff transparent blocks as opaque
if (fs_T2O != 0) {
out_Col = vec4(col, 1.f);
} else {
out_Col = vec4(col, diffuseColor.a);
}
// distance fog!
vec4 fogColor = vec4(0.6, 0.75, 0.9, 1.0);
float FC = gl_FragCoord.z / gl_FragCoord.w / 124.f;
float falloff = clamp(1.05 - exp(-1.05f * (FC - 0.9f)), 0.f, 1.f);
out_Col = mix(out_Col, fogColor, falloff);
}
I tried implementing Blinn-Phong in the fragment shader, but I think it is wrong, simply because the normals are wrong. I think this can be done with some sort of tangent and cross-product solution, but how can I know the tangent of the surface given that we only know the vertex position?
I am not using Unity, just bare C++, and most of the answers I am finding online are for Java or Unity, which I do not understand.
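A common way to get normals for a procedurally displaced surface, in line with the tangent/cross-product idea mentioned above, is to displace two nearby points as well and rebuild the normal from the resulting edge vectors. A minimal GLSL sketch, assuming a smooth displacement function (the displace() below is a hypothetical stand-in; a pure hash like random2 is not smooth, so a smooth noise such as the fbm in the fragment shader would be needed for this to give stable normals):
// Hypothetical smooth displacement standing in for the noise-based offset in the vertex shader.
vec3 displace(vec3 p) {
    return p + vec3(0.0, 0.02 * sin(10.0 * p.x) * sin(10.0 * p.z), 0.0);
}
vec3 displacedNormal(vec3 p, vec3 normal)
{
    // Pick a helper axis that is not parallel to the normal, then build a tangent frame.
    vec3 helper = abs(normal.y) < 0.99 ? vec3(0.0, 1.0, 0.0) : vec3(1.0, 0.0, 0.0);
    vec3 tangent = normalize(cross(normal, helper));
    vec3 bitangent = normalize(cross(normal, tangent));
    float eps = 0.01;                          // assumed neighbor step size
    vec3 p0 = displace(p);
    vec3 p1 = displace(p + eps * tangent);     // displaced neighbor along the tangent
    vec3 p2 = displace(p + eps * bitangent);   // displaced neighbor along the bitangent
    // The cross product of the two displaced edge vectors approximates the new surface normal.
    return normalize(cross(p1 - p0, p2 - p0));
}
fs_Nor could then be set from something like displacedNormal(vs_Pos.xyz, vs_Nor.xyz) instead of the undeformed normal.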

Refraction in ray tracer produces odd results, how do I combine all color components?

I am writing a ray tracer in C++, so far with only spheres, and after implementing Phong's reflection model, shadows and reflections, everything seemed to work fine. When I implemented refraction and Fresnel, I couldn't get things to look right. I have been wondering whether it could be because of how I move the rayOrigin when I am inside/outside the sphere object, but after trying and googling I still can't get it right.
Below is an image. The gray background is a large diffuse sphere, and the smaller blue sphere behind the red sphere is also diffuse. The others are reflective and refractive with an IOR of 1.5-1.6. There are two point lights, one slightly to the left and one slightly to the right.
As seen in the image, the spheres don't appear transparent at all. There are also noticeable circular color differences on the spheres. Maybe this is because of the way I combine the colors for each pixel in my trace function:
Vec3 trace(Vec3& rayOrigin, Vec3& rayDirection, unsigned recursiveDepth, std::vector<Sphere>& spheres, std::vector<Light>& lights, RenderOption& options) {
//Finding nearest intersecting object
float nearestDepth = 1e8;
Sphere nearestObject;
unsigned id = 0;
Vec3 origin = rayOrigin + rayDirection * BIAS;
for (unsigned i = 0; i < spheres.size(); ++i) {
if (spheres[i].intersect(origin, rayDirection)) {
if (spheres[i].depth < nearestDepth) {
nearestDepth = spheres[i].depth;
nearestObject = spheres[i];
id = i;
}
}
}
Vec3 backgroundColor = Vec3(0.0f, 0.0f, 0.0f);
if (!nearestObject.exists) {
//No intersecting object -> background color
return backgroundColor;
} else {
Vec3 totalColor;
Vec3 lightDirection;
//Ambient color
totalColor += options.ambientColor * nearestObject.ambientColor; //Ambient color set to 0
//Calculate fresnel, update fresnelReflection & fresnelRefraction of nearestObject sent in
fresnel(rayDirection, nearestObject);
//Recursive reflection and refraction
if ((nearestObject.reflectivity > 0.0f || nearestObject.transparency > 0.0f) && recursiveDepth < options.recursionDepth) {
//Reflection case
if (nearestObject.fresnelReflection > 0.0f) {
Vec3 reflection = computeReflection(rayDirection, nearestObject.normal);
Vec3 reflectedColor = trace(nearestObject.intersection, reflection, ++recursiveDepth, spheres, lights, options);
totalColor += reflectedColor * nearestObject.fresnelReflection;
}
//Refraction case
if (nearestObject.fresnelRefraction > 0.0f) {
Vec3 refractionDirection = computeRefraction(rayDirection, nearestObject.normal, nearestObject.indexOfRefraction, nearestObject.intersection);
Vec3 refractedColor = trace(nearestObject.intersection, refractionDirection, ++recursiveDepth, spheres, lights, options);
totalColor += refractedColor * nearestObject.fresnelRefraction;
}
}
//Phong reflection model and shadows
for (unsigned i = 0; i < lights.size(); ++i) {
//Shadow ray
Vec3 intersectionPointBias = nearestObject.intersection + nearestObject.normal * BIAS;
Vec3 shadowRayDirection = lights[i].position - intersectionPointBias; //normalized in intersect function
for (unsigned k = 0; k < spheres.size(); ++k) // don't check nearestObject against itself
{
if (!spheres[k].intersect(intersectionPointBias, shadowRayDirection))
{
//Diffuse
lightDirection = lights[i].position - nearestObject.normal;
lightDirection.normalize();
totalColor += lights[i].diffuse * std::max(0.0f, nearestObject.normal.dot(lightDirection)) * nearestObject.diffuseColor;
//Specular
Vec3 viewDirection = nearestObject.intersection - options.cameraOrigin;
viewDirection.normalize();
Vec3 reflection = lightDirection - nearestObject.normal * 2 * (nearestObject.normal.dot(lightDirection));
reflection.normalize();
totalColor += lights[i].specular * nearestObject.specularColor * std::max(0.0f, pow(reflection.dot(viewDirection), nearestObject.shininessCoefficient));
}
}
}
return totalColor;
}
}
Here are the other relevant functions:
computeRefraction:
Vec3 computeRefraction(const Vec3& I, const Vec3& N, const float &ior, Vec3& intersection) {
Vec3 normal = N; normal.normalize();
normal = normal;
Vec3 incident = I; incident.normalize();
float cosi = incident.dot(normal);
float n1, n2;
if (cosi > 0.0f) {
//Incident and normal have same direction, INSIDE sphere
n1 = ior;
n2 = 1.0f;
normal = -normal;
} else {
//Incident and normal have opposite direction, OUTSIDE sphere
n1 = 1.0f;
n2 = ior;
cosi = -cosi;
}
float eta = n1 / n2;
float k = 1.0f - (eta * eta) * (1.0f - cosi * cosi);
if (k < 0.0f) {
//internal reflection
Vec3 reflectionRay = computeReflection(incident, normal);
intersection = intersection + (normal * BIAS);
return reflectionRay;
} else {
Vec3 refractionVector = incident * eta + normal * (eta * cosi - sqrt(k));
refractionVector.normalize();
intersection = intersection - (normal * BIAS);
return refractionVector;
}
}
fresnel:
void fresnel(const Vec3& I, Sphere& obj) {
Vec3 normal = obj.normal;
Vec3 incident = I;
float cosi = clamp(-1.0f, 1.0f, incident.dot(normal));
float etai = 1.0f, etat = obj.indexOfRefraction;
if (cosi > 0) {
std::swap(etai, etat);
}
float sint = etai / etat * sqrt(std::max(0.0f, 1 - cosi * cosi));
if (sint >= 1) {
obj.fresnelReflection = 1.0f;
obj.fresnelRefraction = 0.0f;
} else {
float cost = sqrt(std::max(0.0f, 1 - sint * sint));
cosi = abs(cost);
float Rs = ((etat * cosi) - (etai * cost)) / ((etat * cosi) + (etai * cost));
float Rp = ((etai * cosi) - (etat * cost)) / ((etai * cosi) + (etat * cost));
obj.fresnelReflection = (Rs * Rs + Rp * Rp) / 2;
obj.fresnelRefraction = 1.0f - obj.fresnelReflection;
}
}
reflection:
Vec3 computeReflection(const Vec3& rayDirection, const Vec3& objectNormal){
Vec3 normal = objectNormal;
Vec3 incident = rayDirection;
Vec3 reflection = incident - normal * (normal.dot(rayDirection)) * 2;
reflection.normalize();
return reflection;
}
Any help in understanding and resolving these rendering issues would be greatly appreciated, as no other posts or theory have helped me resolve this over the past week. Thank you!

How to implement refraction light in fragment shader?

I am working on an OpenGL ray tracer which is capable of loading obj files and ray-tracing them. My application loads the obj file with Assimp and then sends all of the triangle faces (with the primitive coordinates and the material coefficients) to the fragment shader using shader storage buffer objects. The basic structure is to render the results from the fragment shader to a quad.
I have trouble with the ray-tracing part in the fragment shader, but first let's introduce it.
For diffuse light, Lambert's cosine law was used; for specular light, the Phong-Blinn model. In the case of total reflection, a weight variable is used so that the reflected light has an effect on other objects as well. The weight is calculated by approximating the Fresnel equation with Schlick's method. In the image below, you can see that the plane works like a mirror, reflecting the image of the cube above.
I would like to make the cube appear as a glass object (like a glass sphere) that has both refracting and reflecting effects, or at least refracts the light. In the image above you can see a refraction effect on the cube, but it is not as good as it should be. I searched for examples of how to implement it, but so far I have only worked out that the Fresnel equation has to be used, just like in the reflection part.
Here is my fragment shader:
vec3 Fresnel(vec3 F0, float cosTheta) {
return F0 + (vec3(1, 1, 1) - F0) * pow(1-cosTheta, 5);
}
float schlickApprox(float Ni, float cosTheta){
float F0=pow((1-Ni)/(1+Ni), 2);
return F0 + (1 - F0) * pow((1 - cosTheta), 5);
}
vec3 trace(Ray ray){
vec3 weight = vec3(1, 1, 1);
const float epsilon = 0.0001f;
vec3 outRadiance = vec3(0, 0, 0);
int maxdepth=5;
for (int i=0; i < maxdepth; i++){
Hit hit=traverseBvhTree(ray);
if (hit.t<0){ return weight * lights[0].La; }
vec4 textColor = texture(texture1, vec2(hit.u, hit.v));
Ray shadowRay;
shadowRay.orig = hit.orig + hit.normal * epsilon;
shadowRay.dir = normalize(lights[0].direction);
// Ambient Light
outRadiance+= materials[hit.mat].Ka.xyz * lights[0].La*textColor.xyz * weight;
// Diffuse light based on Lambert's cosine law
float cosTheta = dot(hit.normal, normalize(lights[0].direction));
if (cosTheta>0 && traverseBvhTree(shadowRay).t<0) {
outRadiance +=lights[0].La * materials[hit.mat].Kd.xyz * cosTheta * weight;
// Specular light based on Phong-Blinn model
vec3 halfway = normalize(-ray.dir + lights[0].direction);
float cosDelta = dot(hit.normal, halfway);
if (cosDelta > 0){
outRadiance +=weight * lights[0].Le * materials[hit.mat].Ks.xyz * pow(cosDelta, materials[hit.mat].shininess); }
}
float fresnel=schlickApprox(materials[hit.mat].Ni, cosTheta);
// For refractive materials
if (materials[hit.mat].Ni < 3)
{
/*this is the under construction part.*/
ray.orig = hit.orig - hit.normal*epsilon;
ray.dir = refract(ray.dir, hit.normal, materials[hit.mat].Ni);
}
// If the refraction index is more than 15, treat the material as mirror.
else if (materials[hit.mat].Ni >= 15) {
weight *= fresnel;
ray.orig=hit.orig+hit.normal*epsilon;
ray.dir=reflect(ray.dir, hit.normal);
}
}
return outRadiance;
}
Update 1
I updated the trace method in the shader. As far as I understand the physics of light, if a material both reflects and refracts light, I have to treat two cases accordingly.
In the case of reflection, I added a weight to the light calculation: weight *= fresnel.
In the case of refraction, the weight is weight *= 1 - fresnel.
Moreover, I compute ray.orig and ray.dir separately for the two cases, and the refraction calculation only happens when there is no total internal reflection (fresnel is smaller than 1).
The modified trace method:
vec3 trace(Ray ray){
vec3 weight = vec3(1, 1, 1);
const float epsilon = 0.0001f;
vec3 outRadiance = vec3(0, 0, 0);
int maxdepth=3;
for (int i=0; i < maxdepth; i++){
Hit hit=traverseBvhTree(ray);
if (hit.t<0){ return weight * lights[0].La; }
vec4 textColor = texture(texture1, vec2(hit.u, hit.v));
Ray shadowRay;
shadowRay.orig = hit.orig + hit.normal * epsilon;
shadowRay.dir = normalize(lights[0].direction);
// Ambient Light
outRadiance+= materials[hit.mat].Ka.xyz * lights[0].La*textColor.xyz * weight;
// Diffuse light based on Lambert's cosine law
float cosTheta = dot(hit.normal, normalize(lights[0].direction));
if (cosTheta>0 && traverseBvhTree(shadowRay).t<0) {
outRadiance +=lights[0].La * materials[hit.mat].Kd.xyz * cosTheta * weight;
// Specular light based on Phong-Blinn model
vec3 halfway = normalize(-ray.dir + lights[0].direction);
float cosDelta = dot(hit.normal, halfway);
if (cosDelta > 0){
outRadiance +=weight * lights[0].Le * materials[hit.mat].Ks.xyz * pow(cosDelta, materials[hit.mat].shininess); }
}
float fresnel=schlickApprox(materials[hit.mat].Ni, cosTheta);
// For refractive/reflective materials
if (materials[hit.mat].Ni < 7)
{
bool outside = dot(ray.dir, hit.normal) < 0;
// compute refraction if it is not a case of total internal reflection
if (fresnel < 1) {
ray.orig = outside ? hit.orig-hit.normal*epsilon : hit.orig+hit.normal*epsilon;
ray.dir = refract(ray.dir, hit.normal,materials[hit.mat].Ni);
weight *= 1-fresnel;
continue;
}
// compute reflection
ray.orig= outside ? hit.orig+hit.normal*epsilon : hit.orig-hit.normal*epsilon;
ray.dir= reflect(ray.dir, hit.normal);
weight *= fresnel;
continue;
}
// If the refraction index is 7 or more, treat the material as a mirror: total reflection
else if (materials[hit.mat].Ni >= 7) {
weight *= fresnel;
ray.orig=hit.orig+hit.normal*epsilon;
ray.dir=reflect(ray.dir, hit.normal);
}
}
return outRadiance;
}
Here is a snapshot connected to the update. Slightly better, I guess.
Update 2:
I found an iterative algorithm that uses a stack to trace refracted and reflected rays in OpenGL; it is on page 68.
I modified my frag shader according to this. Almost fine, except for the back faces, which are completely black. Some pictures attached.
Here is the trace method of my frag shader:
vec3 trace(Ray ray){
vec3 color;
float epsilon=0.001;
Stack stack[8];// max depth
int stackSize = 0;// current depth
int bounceCount = 0;
vec3 coeff = vec3(1, 1, 1);
bool continueLoop = true;
while (continueLoop){
Hit hit = traverseBvhTree(ray);
if (hit.t>0){
bounceCount++;
//----------------------------------------------------------------------------------------------------------------
Ray shadowRay;
shadowRay.orig = hit.orig + hit.normal * epsilon;
shadowRay.dir = normalize(lights[0].direction);
color+= materials[hit.mat].Ka.xyz * lights[0].La * coeff;
// Diffuse light
float cosTheta = dot(hit.normal, normalize(lights[0].direction));// based on Lambert's cosine law
if (cosTheta>0 && traverseBvhTree(shadowRay).t<0) {
color +=lights[0].La * materials[hit.mat].Kd.xyz * cosTheta * coeff;
vec3 halfway = normalize(-ray.dir + lights[0].direction);
float cosDelta = dot(hit.normal, halfway);
// Specular light
if (cosDelta > 0){
color +=coeff * lights[0].Le * materials[hit.mat].Ks.xyz * pow(cosDelta, materials[hit.mat].shininess); }
}
//---------------------------------------------------------------------------------------------------------------
if (materials[hit.mat].indicator > 3.0 && bounceCount <=2){
float eta = 1.0/materials[hit.mat].Ni;
Ray refractedRay;
refractedRay.dir = dot(ray.dir, hit.normal) <= 0.0 ? refract(ray.dir, hit.normal, eta) : refract(ray.dir, -hit.normal, 1.0/eta);
bool totalInternalReflection = length(refractedRay.dir) < epsilon;
if(!totalInternalReflection){
refractedRay.orig = hit.orig + hit.normal*epsilon*sign(dot(ray.dir, hit.normal));
refractedRay.dir = normalize(refractedRay.dir);
stack[stackSize].coeff = coeff *(1 - schlickApprox(materials[hit.mat].Ni, dot(ray.dir, hit.normal)));
stack[stackSize].depth = bounceCount;
stack[stackSize++].ray = refractedRay;
}
else{
ray.dir = reflect(ray.dir, -hit.normal);
ray.orig = hit.orig - hit.normal*epsilon;
}
}
else if (materials[hit.mat].indicator == 0){
coeff *= schlickApprox(materials[hit.mat].Ni, dot(-ray.dir, hit.normal));
ray.orig=hit.orig+hit.normal*epsilon;
ray.dir=reflect(ray.dir, hit.normal);
}
else { //Diffuse Material
continueLoop=false;
}
}
else {
color+= coeff * lights[0].La;
continueLoop=false;
}
if (!continueLoop && stackSize > 0){
ray = stack[stackSize--].ray;
bounceCount = stack[stackSize].depth;
coeff = stack[stackSize].coeff;
continueLoop = true;
}
}
return color;
}

Ray tracing using the Phong illumination model: bizarre color in lighting seemingly fixed by reducing light intensity?

I'm writing a simple ray tracer with lighting using the Phong illumination model. But the problem is that part of the sphere displays a completely different color. For example, the sphere should be only green in this image.
When I reduce the light intensity, it somehow displays correctly, like this.
This is the code for primary rays
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
Ray ray(gCamera);
float x = iX + j * pSize;
float y = iY - i * pSize;
ray.v = vec3(x * scale, y * scale, 0) - gCamera;
gPixels[i][j] = trace(ray);
}
}
And this is the code for the intersection (testing with sphere at origin without any transformation)
double findIntersection(const Ray& ray) {
dvec3 u = mXfmInverse * dvec4(ray.u, 1.0);
dvec3 v = mXfmInverse * dvec4(ray.v, 0.0);
double a = glm::dot(v, v);
double b = 2 * glm::dot(u, v);
double c = glm::dot(u, u) - 1;
double delta = b * b - 4 * a * c;
if (delta < 0) return -1;
double root = sqrt(delta);
double t0 = 0.5 * (-b - root) / a;
if (t0 >= 0) return t0;
double t1 = 0.5 * (-b + root) / a;
return t1 >= 0 ? t1 : -1;
}
And this is the code for calculating the Phong illumination:
Material material = ray.sphere->getMaterial();
// diffuse
dvec3 center = ray.sphere->getXfm() * vec4(0, 0, 0, 1);
dvec3 normal = glm::normalize(hitPoint - center);
dvec3 lightDir = glm::normalize(light.position - hitPoint);
double lambertian = max(glm::dot(normal, lightDir), 0.0);
// specular
double specular = 0;
if (lambertian > 0) {
dvec3 viewDir = glm::normalize(-ray.v);
dvec3 reflectDir = glm::reflect(-lightDir, normal);
specular = pow(max(dot(viewDir, reflectDir), 0.0), material.shininess);
}
dvec3 color = lambertian * material.diffuse + specular * material.specular;
return color * light.color;
}

Is it possible to draw simple geometrical shapes in a Pixel Shader?

I'm currently learning about shaders and graphics pipelines and I was wondering if a pixel shader could be used to create, for example, a triangle or a more complex shape like a zigzag.
Could this be done without the use of a vertex shader?
The answer is yes! You can draw anything you want in a pixel shader, for example by implementing a ray tracer. Here is some sample code:
uniform vec3 lightposition;
uniform vec3 cameraposition;
uniform float motion;
struct Ray
{
vec3 org;
vec3 dir;
};
struct Sphere
{
vec3 Center;
float Radius;
vec4 Color;
float MatID;
float id;
};
struct Intersection
{
float t;
vec3 normal;
vec3 hitpos;
vec4 color;
float objectid;
float materialID;
};
bool sphereIntersect(Ray eyeray, Sphere sp, inout Intersection intersection)
{
float t1=0.0;
eyeray.dir = normalize(eyeray.dir);
float B = 2.0 *( ( eyeray.dir.x * (eyeray.org.x - sp.Center.x ) )+ ( eyeray.dir.y *(eyeray.org.y - sp.Center.y )) + ( eyeray.dir.z * (eyeray.org.z - sp.Center.z ) ));
float C = pow((eyeray.org.x - sp.Center.x),2.0) + pow((eyeray.org.y - sp.Center.y),2.0) + pow((eyeray.org.z - sp.Center.z),2.0) - pow(sp.Radius,2.0);
float D = B*B - 4.0*C ;
if(D>=0.0)
{
t1= (-B - pow(D, .5)) / 2.0;
if (t1 < 0.0)
{
t1 = (-B + pow(D, .5)) / 2.0;
if( t1 < 0.0)
return false;
else
{
if (t1 > 1e-2 && t1 < intersection.t)
{
intersection.t = t1;
intersection.materialID = sp.MatID;
intersection.hitpos = eyeray.org + t1 * eyeray.dir;
intersection.normal = normalize(intersection.hitpos - sp.Center);
intersection.color = sp.Color;
intersection.objectid = sp.id;
return true;
}
}
}
else
{
if(t1 > 1e-2 && t1 < intersection.t)
{
intersection.t = t1;
intersection.materialID = sp.MatID;
intersection.hitpos = eyeray.org + t1 * eyeray.dir;
intersection.normal = normalize(intersection.hitpos - sp.Center);
intersection.color = sp.Color;
intersection.objectid = sp.id;
return true;
}
}
}
else
return false;
return false; // fell through: a hit was found but it was not closer than the current nearest
}
void findIntersection(Ray ray, inout Intersection intersection)
{
intersection.t = 1e10;
intersection.materialID = 0.0;
Sphere sp1 = Sphere(vec3(-2.0,0.0,-5.0),1.5,vec4(0.5, 0.1, 0.5, 1.0),1.0,1.0);
Sphere sp2 = Sphere(vec3( 2.0,0.0,-5.0),1.5,vec4(0.5,0.5,0.1,1.0),1.0,2.0);
Sphere sp3 = Sphere(vec3( 0.0,3.0,-5.0),1.5,vec4(0.1,0.5,0.5,1.0),1.0,3.0);
sphereIntersect(ray, sp1, intersection);
sphereIntersect(ray, sp2, intersection);
sphereIntersect(ray, sp3, intersection);
}
vec4 CalculateColor(vec4 ambient ,float shiness,vec3 intersection, vec3 normal);
Ray ReflectedRay(vec3 Normal,Ray EyeRay,vec3 intersection);
vec4 GetColor(Ray ray)
{
Ray currentRay = ray;
vec4 finalColor = vec4(0.0);
for(int bounce = 1 ; bounce < 4 ; bounce++)
{
Intersection intersection;
intersection.objectid = 0.0;
findIntersection(currentRay, intersection);
if (intersection.materialID == 0.0) // We could not find any object. We return the background color
return finalColor;
else if (intersection.materialID == 1.0)
{
vec3 lv = lightposition - intersection.hitpos;
vec3 nlv = normalize(lv);
Intersection shadowIntersection;
Ray shadowRay = Ray(intersection.hitpos, nlv);
shadowIntersection.objectid = intersection.objectid;
findIntersection(shadowRay, shadowIntersection);
if (shadowIntersection.t > length(lv) || shadowIntersection.t < 1)
{
finalColor = finalColor + float(1.0f/bounce) * CalculateColor(intersection.color, 100.0, intersection.hitpos, intersection.normal);;
}
else
{
finalColor = finalColor + float(1.0f/bounce) * intersection.color;
}
//currentRay = Ray(intersection.hitpos, reflect(ray.dir, intersection.normal));
currentRay = ReflectedRay(intersection.normal,ray,intersection.hitpos);
}
}
return finalColor;
}
Ray createRay(float ScreenWidth,float ScreenHeight)
{
Ray toret;
toret.org = cameraposition;
float left = -3.0;
float bottom = -3.0;
float screenZ = -3.0;
float su = -3.0 + gl_FragCoord.x/ScreenWidth * 6; //gl_FragCoord gives you the current x and y component of your current pixel
float sv = -3.0 + gl_FragCoord.y/ScreenHeight * 6;
float sz = screenZ - cameraposition.z;
toret.dir = normalize(vec3(su,sv,sz));
//vec2 p = (gl_FragCoord.xy/resolution) * 2 ;
//toret.dir = normalize(vec3(p, -1.0));
return toret;
}
Ray ReflectedRay(vec3 Normal,Ray EyeRay,vec3 intersection)
{
Ray reflection;
reflection.dir = EyeRay.dir - 2 * Normal * dot(EyeRay.dir,Normal);
reflection.org = intersection + reflection.dir * 0.01;
return reflection;
}
vec4 CalculateColor(vec4 ambient ,float shiness,vec3 intersection, vec3 normal)
{
//intensities
vec3 Idifuse = vec3(1, 1, 1);
vec3 Iambient = vec3(0.8, 0.8, 0.8);
vec3 Ispecular = vec3(1,1,1);
vec3 kDifuse = vec3(0.5,0.5,0.5); //for difuse
vec3 kSpecular = vec3(0.75, 0.6, 0.3); //for specular
vec3 kAmbient = vec3(0.1, 0.2, 0.3); //for ambient
//vec4 kSpecular = vec4(0.5,0.5,0.5,1.0);
//vec4 kDifuse = vec4(0.5,0.5,0.5,1.0);
//float ColorDifuse = max(dot(normal,lightposition),0.0) * kDifuse; // unused; the diffuse term is computed below
//vector calculations
vec3 l = normalize(lightposition - intersection); //light vector
vec3 n = normalize(normal); // normalVector of point in the sea
vec3 v = normalize(cameraposition - intersection); // view Vector
vec3 h = normalize(v + l); // half Vector
vec3 difuse = kDifuse * Idifuse * max(0.0, dot(n, l));
vec3 specular = kSpecular * Ispecular * pow(max(0.0, dot(n, h)), shiness);
vec3 color = ambient.xyz + difuse + specular;
return vec4(color,1.0);
}
void main()
{
if(lightposition == vec3(0.0,0.0,0.0))
gl_FragColor = vec4(0.0,1.0,0.0,1.0);
Ray eyeray = createRay(600.0,600.0);
gl_FragColor = GetColor(eyeray);
}
A useful technique is to use a fragment shader (I'm an OpenGL guy) with point sprites. Point sprites in OpenGL 3+ get rendered as squares of pixels, with the size of the square (gl_PointSize) set by the vertex shader.
In the fragment shader, gl_PointCoord has the x and y coords of this particular pixel within the square, from 0.0 to 1.0. So you can draw a circle by testing whether gl_PointCoord is within a radius of the center and discarding if not, a framed square by checking that .x and .y are within some distance of the edge, and so on. It's classic maths: define a function(x, y) that returns true for points within the shape you want and false if not.
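As a minimal sketch of that idea (the color and thresholds are arbitrary choices), a point-sprite fragment shader that keeps only the pixels inside a circle might look like this:
#version 330 core
out vec4 fragColor;
void main()
{
    // gl_PointCoord runs from 0.0 to 1.0 across the point sprite.
    vec2 fromCenter = gl_PointCoord - vec2(0.5);
    // Circle: keep only fragments within the radius of the center.
    if (length(fromCenter) > 0.5)
        discard;
    // For a framed square instead, discard the interior, e.g.:
    // if (max(abs(fromCenter.x), abs(fromCenter.y)) < 0.4) discard;
    fragColor = vec4(1.0, 0.5, 0.0, 1.0);
}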
The Orange book, OpenGL Shading Language 3rd edition, has some examples (which in turn come from RenderMan) of how to draw such shapes.
Hope this helps.
What you want is called procedural textures or procedural shading.
You can draw different shapes with simple (and not so simple) math.
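For instance, a full-screen fragment shader sketch that draws a zigzag line with plain math might look like this (u_resolution is a hypothetical uniform holding the viewport size in pixels):
#version 330 core
uniform vec2 u_resolution; // hypothetical uniform: viewport size in pixels
out vec4 fragColor;
void main()
{
    // Normalize pixel coordinates to [0, 1].
    vec2 uv = gl_FragCoord.xy / u_resolution;
    // A zigzag: a triangle wave in x, compared against y with a small thickness.
    float zig = abs(fract(uv.x * 5.0) * 2.0 - 1.0);           // triangle wave with 5 periods
    float line = step(abs(uv.y - (0.25 + 0.5 * zig)), 0.01);  // 1.0 on the zigzag, 0.0 elsewhere
    // A solid triangle can be built the same way from three half-plane tests, e.g.:
    // float inside = step(uv.y, uv.x) * step(uv.y, 1.0 - uv.x) * step(0.1, uv.y);
    fragColor = vec4(vec3(line), 1.0);
}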
Take a look for some examples here:
http://glslsandbox.com/
More on google.