I would like to replicate this in SFML, so I wrote this:
Main.cpp
#include <SFML/Graphics.hpp>

#define WIDTH 800

int main() {
    sf::RenderWindow window(sf::VideoMode(WIDTH, WIDTH), "Test");
    sf::Shader shader, buffer;
    shader.loadFromFile("shader.frag", sf::Shader::Fragment);
    buffer.loadFromFile("buffer.frag", sf::Shader::Fragment);
    sf::Shader *shaders[] = {&shader, &buffer};
    sf::Texture texture;
    texture.create(WIDTH, WIDTH);
    sf::RenderTexture renderTexture;
    sf::Vector2f mousePos = sf::Vector2f(-1.f, -1.f);
    for(auto &shader : shaders) {
        shader->setUniform("iChannel0", texture);
        shader->setUniform("iResolution", sf::Glsl::Vec2(WIDTH, WIDTH));
    }
    auto time = sf::Clock();
    sf::Sprite sprite(texture);
    while(window.isOpen()) {
        sf::Event currEvent;
        while(window.pollEvent(currEvent)) {
            switch(currEvent.type) {
                case(sf::Event::Closed): window.close(); break;
                case(sf::Event::MouseMoved):
                    mousePos = sf::Vector2f(currEvent.mouseMove.x, currEvent.mouseMove.y);
                    break;
            }
        }
        buffer.setUniform("iTime", time.getElapsedTime().asSeconds());
        buffer.setUniform("iMouse", sf::Glsl::Vec2(mousePos.x, WIDTH - mousePos.y));
        sf::RenderStates states;
        states.blendMode = sf::BlendAdd;
        states.shader = &buffer;
        renderTexture.clear(sf::Color::Transparent);
        renderTexture.draw(sf::Sprite(texture), states);
        renderTexture.display();
        sprite.setTexture(renderTexture.getTexture());
        window.clear(sf::Color::Black);
        window.draw(sprite, &shader);
        window.display();
    }
}
Buffer.frag
#version 140

out vec4 out_Colour;

uniform sampler2D iChannel0;
uniform vec2 iResolution;
uniform vec2 iMouse;
uniform float iTime;

void main(void) {
    vec2 mouse = iMouse.xy;
    if(mouse.x <= 0.)
        mouse = vec2(iResolution.x * (sin(iTime)+1.)/2., iResolution.y/2.);
    vec3 blob = vec3(.11 - clamp(length((gl_FragCoord.xy - mouse.xy)/iResolution.x), 0., .11)) * 2.;
    vec3 stack = texture(iChannel0, gl_FragCoord.xy/iResolution.xy).xyz * vec3(0.99, .982, .93);
    out_Colour = vec4(stack + blob, 1.0);
}
Shader.frag
#version 140

out vec4 out_Colour;

uniform sampler2D iChannel0;
uniform vec2 iResolution;

void main(void) {
    out_Colour = vec4(texture(iChannel0, gl_FragCoord.xy / iResolution.xy).xyz, 1.0);
}
I am probably missing something with the buffers.
Similar questions without a clear answer:
shadertoy to SFML
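For context, one common way to emulate Shadertoy's self-referencing Buffer A in SFML is to create the sf::RenderTexture explicitly and copy its result back into the texture the shader samples, once per frame. A rough sketch of that frame loop, reusing the names from the code above (an illustration of the technique, not a verified fix):

renderTexture.create(WIDTH, WIDTH);                   // the render texture must be created before use
while (window.isOpen()) {
    // ... event handling and uniform updates as above ...
    renderTexture.clear(sf::Color::Transparent);
    renderTexture.draw(sf::Sprite(texture), &buffer); // buffer.frag reads the previous frame via iChannel0
    renderTexture.display();
    texture.update(renderTexture.getTexture());       // feed the result back so it accumulates next frame
    window.clear(sf::Color::Black);
    window.draw(sf::Sprite(texture), &shader);        // shader.frag just presents the accumulated buffer
    window.display();
}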
In the following example I am trying to get a trailing glow by using sf::RenderTexture. Unfortunately, the texture seems to be mirrored, and I don't understand why.
Main program
#include <SFML/Graphics.hpp>
#include <iostream>

#define WIDTH 800

int main() {
    sf::RenderWindow window(sf::VideoMode(WIDTH, WIDTH), "Test");
    sf::Shader buffer;
    buffer.loadFromFile("buffer.frag", sf::Shader::Fragment);
    sf::Texture texture;
    texture.create(WIDTH, WIDTH);
    sf::Sprite sprite(texture);
    sf::RenderTexture renderTexture;
    renderTexture.create(WIDTH, WIDTH);
    sf::Vector2f mousePos = sf::Vector2f(-1.f, -1.f);
    buffer.setUniform("iChannel0", texture);
    buffer.setUniform("iResolution", sf::Glsl::Vec2(WIDTH, WIDTH));
    auto time = sf::Clock();
    window.clear();
    while(window.isOpen()) {
        sf::Event currEvent;
        while(window.pollEvent(currEvent))
            if (currEvent.type == sf::Event::Closed) window.close();
        buffer.setUniform("iTime", (float)time.getElapsedTime().asSeconds());
        renderTexture.clear();
        renderTexture.draw(sprite, &buffer);
        renderTexture.display();
        texture.update(renderTexture.getTexture());
        window.clear();
        window.draw(sprite);
        window.display();
    }
}
Fragment Shader
#version 140

out vec4 out_Colour;

uniform sampler2D iChannel0;
uniform vec2 iResolution;
uniform float iTime;

void main(void) {
    vec2 spot = vec2(sin(iTime*5)*200+400, cos(iTime*15.0)*70+150);
    vec3 color = vec3(sin(iTime*5), sin(iTime*1), sin(iTime*10)) * 0.5 + 0.5;
    vec2 uv = (gl_FragCoord.xy - spot.xy) / iResolution.xy;
    vec3 blob = 1.0 - vec3(smoothstep(length(uv), 0, .02));
    vec3 stack = texture(iChannel0, gl_FragCoord.xy / iResolution.xy).xyz;
    stack -= vec3(0.003);
    out_Colour = vec4(stack + color * blob * 0.1, 1.0);
}
I get this weird result where half the screen is mirrored:
But if I render the buffer directly to the window, I don't get this result:
window.clear();
window.draw(sprite, &buffer);
window.display();
texture.update(window);
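Another pattern worth sketching here is to ping-pong between two sf::RenderTextures instead of copying the result back into texture, so nothing is copied or re-uploaded between frames. This is only an outline (bufA, bufB, src and dst are made-up names), and because buffer.frag samples with gl_FragCoord rather than the sprite's texture coordinates, it may or may not change the mirroring on its own:

sf::RenderTexture bufA, bufB;
bufA.create(WIDTH, WIDTH);
bufB.create(WIDTH, WIDTH);
sf::RenderTexture *src = &bufA;                 // sampled by the shader this frame
sf::RenderTexture *dst = &bufB;                 // drawn into this frame
while (window.isOpen()) {
    // ... events and uniform updates as before ...
    buffer.setUniform("iChannel0", src->getTexture());
    dst->clear();
    dst->draw(sf::Sprite(src->getTexture()), &buffer);
    dst->display();
    window.clear();
    window.draw(sf::Sprite(dst->getTexture()));
    window.display();
    sf::RenderTexture *tmp = src; src = dst; dst = tmp;   // this frame's result becomes next frame's input
}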
I'm creating a few glowing particles in raylib using shaders. The particles are supposed to move along with the mouse, but when I compile, everything is stuck in the bottom-left corner and the particles don't move.
How it Looks
The C++ code
#include <raylib.h>
#include <vector>

const int W = 400;
const int H = 400;

std::vector<Vector2> particle;

float remap(float value, float low1, float high1, float low2, float high2) {
    return low2 + (value - low1) * (high2 - low2) / (high1 - low1);
}

int main() {
    SetConfigFlags(FLAG_WINDOW_RESIZABLE);
    InitWindow(W, H, "FireWorks");

    Shader shader = LoadShader("../assets/vert.glsl", "../assets/frag.glsl");
    Texture2D texture = LoadTextureFromImage(GenImageColor(W, H, BLUE));

    int resolLoc = GetShaderLocation(shader, "resolution");
    int particleLoc = GetShaderLocation(shader, "particle");
    int particleCountLoc = GetShaderLocation(shader, "particleCount");

    float res[2] = {(float)W, (float)H};
    SetShaderValue(shader, resolLoc, res, SHADER_UNIFORM_VEC2);

    SetTargetFPS(60);

    while (!WindowShouldClose()) {
        BeginDrawing();
        ClearBackground(BLACK);

        particle.push_back(Vector2{(float)GetMouseX(), (float)GetMouseY()});
        int removeCount = 1;
        for (int i = 0; i < removeCount; i++) {
            if (particle.size() == 0) break;
            if (particle.size() > 30) {
                particle.erase(particle.begin() + i);
            }
        }

        BeginShaderMode(shader);
        float particles[30][2];
        for (int i = 0; i < particle.size(); i++) {
            particles[i][0] = remap(particle[i].x, 0, W, 0.0, 1.0);
            particles[i][1] = remap(particle[i].y, 0, H, 1.0, 0.0);
        }
        int pSize = particle.size();
        SetShaderValue(shader, particleCountLoc, &pSize, SHADER_UNIFORM_INT);
        SetShaderValue(shader, particleLoc, particles, SHADER_UNIFORM_VEC2);
        DrawTextureRec(texture, (Rectangle) { 0, 0, (float)texture.width, (float) -texture.height }, (Vector2) { 0, 0 }, RAYWHITE);
        DrawRectangle(0, 0, W, H, BLACK);
        EndShaderMode();

        EndDrawing();
    }

    UnloadTexture(texture);
    UnloadShader(shader);
    CloseWindow();
    return 0;
}
The Vertex Shader
#version 330

// Input vertex attributes
in vec3 vertexPosition;
in vec2 vertexTexCoord;
in vec3 vertexNormal;
in vec4 vertexColor;

// Input uniform values
uniform mat4 mvp;

// Output vertex attributes (to fragment shader)
out vec2 fragTexCoord;
out vec4 fragColor;

// NOTE: Add here your custom variables

void main()
{
    // Send vertex attributes to fragment shader
    fragTexCoord = vertexTexCoord;
    fragColor = vertexColor;

    // Calculate final vertex position
    gl_Position = mvp * vec4(vertexPosition, 1.0);
}
The Fragment Shader
#version 330

// Input vertex attributes (from vertex shader)
in vec2 fragTexCoord;
in vec4 fragColor;

// Input uniform values
uniform sampler2D texture0;
uniform vec4 colDiffuse;

// Output fragment color
out vec4 finalColor;

// NOTE: Add here your custom variables
uniform vec2 resolution;
uniform int particleCount;
uniform vec2 particle[30];

void main() {
    // Texel color fetching from texture sampler
    vec4 texelColor = texture(texture0, fragTexCoord);
    vec2 st = gl_FragCoord.xy / resolution.xy;
    float r = 0.0;
    float g = 0.0;
    float b = 0.0;
    for (int i = 0; i < 30; i++) {
        if (i < particleCount) {
            vec2 particlePos = particle[i];
            float value = float(i) / distance(st, particlePos.xy) * 0.00015;
            g += value * 0.5;
            b += value;
        }
    }
    finalColor = vec4(r, g, b, 1.0) * texelColor * colDiffuse;
}
The JS version of the code (which works) is here.
If you could point me in the right direction it'd be great.
The uniform particle is of type vec2[30]. A uniform array needs to be set with SetShaderValueV instead of SetShaderValue. Replace

SetShaderValue(shader, particleLoc, particles, SHADER_UNIFORM_VEC2);

with

SetShaderValueV(shader, particleLoc, particles[0], SHADER_UNIFORM_VEC2, 30);
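For reference, raylib's SetShaderValueV(Shader shader, int locIndex, const void *value, int uniformType, int count) uploads count elements of the given uniform type from a tightly packed array. A small sketch of how that maps onto the particles array from the question (the helper name is made up; it assumes shader was loaded as in the question and that the GLSL side declares uniform int particleCount; and uniform vec2 particle[30];):

#include <raylib.h>

// Sketch only: uploads the first `count` (x, y) pairs of a float[30][2] as vec2s.
static void UploadParticles(Shader shader, const float particles[30][2], int count) {
    int particleLoc      = GetShaderLocation(shader, "particle");
    int particleCountLoc = GetShaderLocation(shader, "particleCount");

    SetShaderValue(shader, particleCountLoc, &count, SHADER_UNIFORM_INT);               // single int
    SetShaderValueV(shader, particleLoc, &particles[0][0], SHADER_UNIFORM_VEC2, count); // vec2 array
}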
I am new to shaders, and I want to animate an object with the vertex shader.
Right now I just want to move it by a constant offset. For some reason, instead of moving in the x-direction of the world, it moves in the x-direction of the camera (so whenever I turn the camera, the object rotates with me).
The project is in Processing, but I don't think that affects the shader.
THE PROCESSING CODE:
PShader sdr;

void setup() {
    size(1000, 1000, P3D);
    noStroke();
    sdr = loadShader("shdFrag.glsl", "shdVert.glsl");
}

void draw() {
    background(200);

    // Set camera
    camera(0, -300, 700, mouseX-500, 0, 200, 0, 1, 0);

    // Ground
    resetShader();
    beginShape();
    fill(100);
    vertex(-500, 0, 500);
    vertex( 500, 0, 500);
    vertex( 500, 0, -500);
    vertex(-500, 0, -500);
    endShape();

    // Red Sphere
    shader(sdr);
    fill(255, 0, 0);
    sphere(100);
}
VERTEX SHADER:
uniform mat4 transform;

attribute vec4 position;
attribute vec4 color;

out vec4 vertColor;

void main() {
    vec4 pos = position;
    pos.x += 300;

    vertColor = color;
    gl_Position = transform * pos;
}
FRAGMENT SHADER:
#ifdef GL_ES
precision mediump float;
precision mediump int;
#endif

in vec4 vertColor;

void main() {
    vec4 color = vertColor;
    gl_FragColor = vec4(color.xyz, 1);
}
A GIF of what is happening:
the scene with a sphere
I'm trying to port the Shadertoy chromakey example to p5, with the webcam as the video source. After a lot of time spent reading shader documentation, my code still doesn't work. I need some help.
I followed this guide to port the code to p5.
Fragment shader code:
#ifdef GL_ES
precision mediump float;
#endif

uniform sampler2D tex0;
uniform sampler2D tex1;

mat4 RGBtoYUV = mat4(0.257,  0.439, -0.148, 0.0,
                     0.504, -0.368, -0.291, 0.0,
                     0.098, -0.071,  0.439, 0.0,
                     0.0625, 0.500,  0.500, 1.0);

vec4 chromaKey = vec4(0.05, 0.63, 0.14, 1);
vec2 maskRange = vec2(0.005, 0.26);

float colorclose(vec3 yuv, vec3 keyYuv, vec2 tol)
{
    float tmp = sqrt(pow(keyYuv.g - yuv.g, 2.0) + pow(keyYuv.b - yuv.b, 2.0));
    if (tmp < tol.x)
        return 0.0;
    else if (tmp < tol.y)
        return (tmp - tol.x)/(tol.y - tol.x);
    else
        return 1.0;
}

void main()
{
    vec2 fragPos = gl_FragCoord.xy / iResolution.xy;
    vec4 texColor0 = texture(text0, fragPos);
    vec4 texColor1 = texture(text1, fragPos);

    vec4 keyYUV = RGBtoYUV * chromaKey;
    vec4 yuv = RGBtoYUV * texColor0;

    float mask = 1.0 - colorclose(yuv.rgb, keyYUV.rgb, maskRange);
    gl_FragColor = max(texColor0 - mask * chromaKey, 0.0) + texColor1 * mask;
}
P5 sketch code:
let theShader;
let cam;
let img;

function preload(){
    theShader = loadShader('webcam.vert', 'webcam.frag');
    img = loadImage('http://www.quadrochave.com/wp-content/uploads/elementor/thumbs/nodulo_bannersite_ptodu%C3%A7%C3%A3o2-mpe2nvmu8s8o2uqcd7b2oh3mnuv9up05ubby33shz4.png');
}

function setup() {
    pixelDensity(1);
    createCanvas(windowWidth, windowHeight, WEBGL);
    noStroke();
    cam = createCapture(VIDEO);
    cam.size(windowWidth, windowHeight);
    cam.hide();
}

function draw() {
    // shader() sets the active shader with our shader
    shader(theShader);
    // passing cam as a texture
    theShader.setUniform('tex0', cam);
    theShader.setUniform('tex1', img);
    // rect gives us some geometry on the screen
    theShader.rect(0,0,width,height);
}
Test on Glitch
Shadertoy chromakey original fragment shader
The major issue is that you didn't declare and set the uniform variable iResolution. There are also some more issues in the shader code: the samplers have to be referenced as tex0 and tex1 rather than text0 and text1.
Fragment shader:
precision mediump float;

uniform sampler2D tex0;
uniform sampler2D tex1;
uniform vec2 iResolution;

mat4 RGBtoYUV = mat4(0.257,  0.439, -0.148, 0.0,
                     0.504, -0.368, -0.291, 0.0,
                     0.098, -0.071,  0.439, 0.0,
                     0.0625, 0.500,  0.500, 1.0);

vec4 chromaKey = vec4(0.05, 0.63, 0.14, 1);
vec2 maskRange = vec2(0.005, 0.26);

float colorclose(vec3 yuv, vec3 keyYuv, vec2 tol)
{
    float tmp = sqrt(pow(keyYuv.g - yuv.g, 2.0) + pow(keyYuv.b - yuv.b, 2.0));
    if (tmp < tol.x)
        return 0.0;
    else if (tmp < tol.y)
        return (tmp - tol.x)/(tol.y - tol.x);
    else
        return 1.0;
}

void main()
{
    vec2 fragPos = gl_FragCoord.xy / iResolution.xy;
    vec4 texColor0 = texture2D(tex0, fragPos);
    vec4 texColor1 = texture2D(tex1, fragPos);

    vec4 keyYUV = RGBtoYUV * chromaKey;
    vec4 yuv = RGBtoYUV * texColor0;

    float mask = 1.0 - colorclose(yuv.rgb, keyYUV.rgb, maskRange);
    gl_FragColor = max(texColor0 - mask * chromaKey, 0.0) + texColor1 * mask;
}
Script:
let theShader;
let cam;
let img;

function setup() {
    createCanvas(windowWidth, windowHeight, WEBGL);
    theShader = loadShader('webcam.vert', 'webcam.frag');
    img = loadImage('http://www.quadrochave.com/wp-content/uploads/elementor/thumbs/nodulo_bannersite_ptodu%C3%A7%C3%A3o2-mpe2nvmu8s8o2uqcd7b2oh3mnuv9up05ubby33shz4.png');
    pixelDensity(1);
    noStroke();
    cam = createCapture(VIDEO);
    cam.size(windowWidth, windowHeight);
    cam.hide();
}

function draw() {
    // shader() sets the active shader with our shader
    shader(theShader);
    // passing cam as a texture
    theShader.setUniform('tex0', cam);
    theShader.setUniform('tex1', img);
    theShader.setUniform('iResolution', [width, height]);
    // rect gives us some geometry on the screen
    rect(0,0,width,height);
}
If the vertex shader provides the texture coordinate:
// our vertex data
attribute vec3 aPosition;
attribute vec2 aTexCoord;

// lets get texcoords just for fun!
varying vec2 vTexCoord;

void main() {
    // copy the texcoords
    vTexCoord = aTexCoord;

    // copy the position data into a vec4, using 1.0 as the w component
    vec4 positionVec4 = vec4(aPosition, 1.0);
    positionVec4.xy = positionVec4.xy * 2.0 - 1.0;

    // send the vertex information on to the fragment shader
    gl_Position = positionVec4;
}
then you can use this coordinate instead of gl_FragCoord.xy / iResolution.xy:
varying vec2 vTexCoord;

// [...]

void main() {
    vec2 fragPos = vTexCoord.xy;

    // [...]
}
I am trying to use the FreeType (v2) library for 2D text drawing in my OpenGL (v4.1) scene using C++. The text is rendered, but not correctly: one glyph hides another. How can I prevent this behavior? My drawing code is below:
void CFreeTypeFont::Print(string sText, int x, int y, int iPXSize) {
    glBindVertexArray(uiVAO);
    glUniform1i(shp->uniform("gSampler"), 0);

    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    int iCurX = x, iCurY = y;
    if(iPXSize == -1) iPXSize = iLoadedPixelSize;
    float fScale = float(iPXSize)/float(iLoadedPixelSize);

    FOR(i, ESZ(sText)) {
        if(sText[i] == '\n') {
            iCurX = x;
            iCurY -= iNewLine*iPXSize/iLoadedPixelSize;
            continue;
        }
        int iIndex = int(sText[i]);
        iCurX += iBearingX[iIndex]*iPXSize/iLoadedPixelSize;
        if(sText[i] != ' ') {
            tCharTextures[iIndex].BindTexture(0);
            glm::mat4 mModelView = glm::translate(glm::mat4(1.0f), glm::vec3(float(iCurX), float(iCurY), 0.0f));
            mModelView = glm::scale(mModelView, glm::vec3(fScale));
            glUniformMatrix4fv(shp->uniform("matrices.modelViewMatrix"), 1, GL_FALSE, glm::value_ptr(mModelView));

            // Draw character
            glDrawArrays(GL_TRIANGLE_STRIP, iIndex*4, 4);
        }
        iCurX += (iAdvX[iIndex]-iBearingX[iIndex])*iPXSize/iLoadedPixelSize;
    }

    glDisable(GL_BLEND);
}
The output is like this:
Must be "Hellow, World!"
If I disable blend mode, the picture clearly shows the characters overlapping:
The same text without opacity
My vertex shader:
#version 410

uniform struct Matrices {
    mat4 modelViewMatrix;
} matrices;

uniform float screenWidth;
uniform float screenHeight;

layout (location = 0) in vec2 inPosition;
layout (location = 1) in vec2 inCoord;

out vec2 texCoord;

void main() {
    texCoord = inCoord;
    vec4 pre = matrices.modelViewMatrix*vec4(inPosition, 0.0, 1.0);
    pre.x = pre.x / screenWidth - 1;
    pre.y = pre.y / screenHeight - 1;
    gl_Position = pre;
}
The fragment shader:
#version 410

in vec2 texCoord;
out vec4 outputColor;

uniform sampler2D gSampler;
uniform vec4 vColor;

void main() {
    vec4 vTexColor = texture(gSampler, texCoord);
    outputColor = vTexColor*vColor;
}
The results look as if you haven't disabled depth testing when rendering the fonts. The depth test prevents later glyphs from being drawn at the same screen position as previously drawn ones.
To disable depth testing, call
glDisable(GL_DEPTH_TEST);
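A minimal sketch of how that fits around the text pass (assuming a CFreeTypeFont instance named font, and that the rest of the scene still needs depth testing, so it is re-enabled afterwards):

glDisable(GL_DEPTH_TEST);                 // let later glyphs draw over earlier ones at the same depth
font.Print("Hello, World!", 20, 20, 32);  // Print() already enables and disables blending itself
glEnable(GL_DEPTH_TEST);                  // restore depth testing for the 3D scene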