LWJGL Blending Issues - opengl

I'm trying to use a lightmap to create an effect like this, but doing it only with blending functions and rendering: https://www.youtube.com/watch?v=JkyGTZ0992s
Basically, the light map should be 100% transparent where it's all white and fade off from there, as you can see in the video above.
Right now, it's blending, but it's very dark as you can see from the picture (ignore starbound art): https://gyazo.com/fa0cecbe6085db3dc15291a86396af13
Is this because of my blending modes? I can't figure it out, hopefully you can! Thanks!
CODE:
public ShaderProgram shader;
public Texture texture;
public Sprite sprite;
public Sprite lightSprite;
public Texture lightText;
public ShapeRenderer shape;
SpriteBatch spriteBatch;
@Override
public void create() {
shape = new ShapeRenderer();
spriteBatch = new SpriteBatch();
texture = new Texture(Gdx.files.internal("image.png"));
sprite = new Sprite(texture);
lightText = new Texture(Gdx.files.internal("light.png"));
lightSprite = new Sprite(lightText);
ShaderProgram.pedantic = false;
shader = new ShaderProgram(Gdx.files.internal("shaders/passthrough.vert"), Gdx.files.internal("shaders/passthrough.frag"));
System.out.println(shader.getLog());
spriteBatch.setShader(shader);
}
@Override
public void render() {
Gdx.gl.glClearColor(0, 0, 0, 1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
/*
Draw image
*/
spriteBatch.setShader(shader);
spriteBatch.begin();
sprite.draw(spriteBatch);
spriteBatch.setShader(null);
spriteBatch.setBlendFunction(GL30.GL_DST_COLOR, GL30.GL_SRC_ALPHA);
lightSprite.draw(spriteBatch, 1.0f);
spriteBatch.end();
lightSprite.setPosition(Gdx.input.getX(), Gdx.input.getY());
spriteBatch.setBlendFunction(GL20.GL_SRC_ALPHA, GL20.GL_ONE_MINUS_SRC_ALPHA);
}
}
The shader I'm using just makes the screen darker; here it is too:
VERT: http://pastebin.com/LkxCZGf6
FRAG: http://pastebin.com/dLwudnHx
lightmap:

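For what it's worth, the usual way to get the effect in the video is to draw all lights into an off-screen light map (cleared to the ambient colour) and then multiply that map over the scene, rather than blending the light sprite directly with GL_DST_COLOR/GL_SRC_ALPHA. A rough libGDX sketch of that idea, reusing the fields from the code above (lightFbo and lightRegion are hypothetical extra fields):
// in create():
lightFbo = new FrameBuffer(Format.RGBA8888, Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), false);
lightRegion = new TextureRegion(lightFbo.getColorBufferTexture());
lightRegion.flip(false, true); // FBO colour textures come out y-flipped

// in render():
// 1) Draw the lights into the light map; the clear colour is the ambient light level.
lightFbo.begin();
Gdx.gl.glClearColor(0.15f, 0.15f, 0.2f, 1f);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
spriteBatch.begin();
spriteBatch.setBlendFunction(GL20.GL_SRC_ALPHA, GL20.GL_ONE); // additive lights
lightSprite.draw(spriteBatch);
spriteBatch.end();
lightFbo.end();

// 2) Draw the scene normally, then multiply the light map over it.
Gdx.gl.glClearColor(0, 0, 0, 1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
spriteBatch.begin();
spriteBatch.setBlendFunction(GL20.GL_SRC_ALPHA, GL20.GL_ONE_MINUS_SRC_ALPHA);
sprite.draw(spriteBatch);
spriteBatch.setBlendFunction(GL20.GL_DST_COLOR, GL20.GL_ZERO); // multiply: dst * src
spriteBatch.draw(lightRegion, 0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
spriteBatch.setBlendFunction(GL20.GL_SRC_ALPHA, GL20.GL_ONE_MINUS_SRC_ALPHA); // restore default
spriteBatch.end();
Where the light map is pure white the scene is unchanged, and it darkens towards the ambient colour everywhere else, which is the fade-off described above.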
Related

Cubemap texturing issue (D3D11, C++)

I have a texture problem with the cubemap I'm rendering and can't seem to figure it out. I've generated a cube map with DirectX's texture tools and then read it using
D3DX11CreateShaderResourceViewFromFile(device, L"cubemap.dds", 0, 0, &fullcubemap, 0);
The cubemap texture is not high quality at all and it looks really stretched/distorted. I can definitely tell that the images used for the cubemap match up correctly, but it's not great at all at the moment.
I'm not sure why this is happening. Is it because my textures are too large/small or is it something else? If it's due to the size of the textures, what is a recommended texture size? I am using a sphere for the cubemap not a cube.
Edit:
Shader:
cbuffer SkyboxConstantBuffer {
float4x4 world;
float4x4 view;
float4x4 projection;
};
TextureCube gCubeMap;
SamplerState samTriLinearSam {
Filter = MIN_MAG_MIP_LINEAR;
AddressU = Wrap;
AddressV = Wrap;
};
struct VertexIn {
float4 position : POSITION;
};
struct VertexOut {
float4 position : SV_POSITION;
float4 spherePosition : POSITION;
};
VertexOut VS(VertexIn vin) {
VertexOut vout = (VertexOut)0;
vin.position.w = 1.0f;
vout.position = mul(vin.position, world);
vout.position = mul(vout.position, view);
vout.position = mul(vout.position, projection);
vout.spherePosition = vin.position;
return vout;
}
float4 PS(VertexOut pin) : SV_Target {
return gCubeMap.Sample(samTriLinearSam, pin.spherePosition);//float4(1.0, 0.5, 0.5, 1.0);
}
RasterizerState NoCull {
CullMode = None;
};
DepthStencilState LessEqualDSS {
DepthFunc = LESS_EQUAL;
};
technique11 SkyTech {
pass p0 {
SetVertexShader(CompileShader(vs_4_0, VS()));
SetGeometryShader(NULL);
SetPixelShader(CompileShader(ps_4_0, PS()));
SetRasterizerState(NoCull);
SetDepthStencilState(LessEqualDSS, 0);
}
}
Draw:
immediateContext->OMSetRenderTargets(1, &renderTarget, nullptr);
XMMATRIX sworld, sview, sprojection;
SkyboxConstantBuffer scb;
sview = XMLoadFloat4x4(&_view);
sprojection = XMLoadFloat4x4(&_projection);
sworld = XMLoadFloat4x4(&_world);
scb.world = sworld;
scb.view = sview;
scb.projection = sprojection;
immediateContext->IASetIndexBuffer(cubeMapSphere->getIndexBuffer(), DXGI_FORMAT_R32_UINT, 0);
ID3D11Buffer* vertexBuffer = cubeMapSphere->getVertexBuffer();
//ID3DX11EffectShaderResourceVariable * cMap;
////cMap = skyboxShader->GetVariableByName("gCubeMap")->AsShaderResource();
immediateContext->PSSetShaderResources(0, 1, &fullcubemap);//textures
//cMap->SetResource(fullcubemap);
immediateContext->IASetVertexBuffers(0, 1, &vertexBuffer, &stride, &offset);
immediateContext->VSSetShader(skyboxVertexShader, nullptr, 0);
immediateContext->VSSetConstantBuffers(0, 1, &skyboxConstantBuffer);
immediateContext->PSSetConstantBuffers(0, 1, &skyboxConstantBuffer);
immediateContext->PSSetShader(skyboxPixelShader, nullptr, 0);
immediateContext->UpdateSubresource(skyboxConstantBuffer, 0, nullptr, &scb, 0, 0);
immediateContext->DrawIndexed(cubeMapSphere->getIndexBufferSize(), 0, 0);
Initially I was planning to use this snippet to update the TextureCube variable in the shader
ID3DX11EffectShaderResourceVariable * cMap;
cMap = skyboxShader->GetVariableByName("gCubeMap")->AsShaderResource();
cMap->SetResource(fullcubemap);
But it seems that has no effect, and in fact, without the following line, the sphere I'm using for the cubemap gets textured with a texture used by another object in the scene, so perhaps there's something going on here? I'm not sure what, though.
immediateContext->PSSetShaderResources(0, 1, &fullcubemap);//textures
Edit: It's probably not the above; I realised that if this wasn't updated, the old texture would stay applied, as it's never wiped after each draw.
Edit: Tried the cubemap with both a sphere and a cube, still the same texture issue.
Edit: Tried loading the shader resource view differently
D3DX11_IMAGE_LOAD_INFO loadSMInfo;
loadSMInfo.MiscFlags = D3D11_RESOURCE_MISC_TEXTURECUBE;
ID3D11Texture2D* SMTexture = 0;
hr = D3DX11CreateTextureFromFile(device, L"cubemap.dds",
&loadSMInfo, 0, (ID3D11Resource**)&SMTexture, 0);
D3D11_TEXTURE2D_DESC SMTextureDesc;
SMTexture->GetDesc(&SMTextureDesc);
D3D11_SHADER_RESOURCE_VIEW_DESC SMViewDesc;
SMViewDesc.Format = SMTextureDesc.Format;
SMViewDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURECUBE;
SMViewDesc.TextureCube.MipLevels = SMTextureDesc.MipLevels;
SMViewDesc.TextureCube.MostDetailedMip = 0;
hr = device->CreateShaderResourceView(SMTexture, &SMViewDesc, &fullcubemap);
Still produces the same output, any ideas?
Edit: Tried increasing the zfar distance and the texture remains the exact same no matter what value I put.
Example with second texture with increased view distance.
This texture is used on another object in my scene and comes out fine.
Edit: I have been trying to mess with the scaling of the texture/object
To achieve this I used
vin.position = vin.position * 50.0f;
This is beginning to look sort of like how it should; however, when I turn my camera angle the image disappears, so I obviously know this isn't correct. But if I could just scale the image per pixel or per vertex properly, I'm sure I could get the end result.
Edit:
I can confirm the cubemap is rendering correctly. I was ignoring the view/projection space and just using world, and managed to get this, which is the high-quality image I'm after, just not in the correct space. Yes, the faces are incorrect, but I'm not fussed about that now; it's easy enough to swap them around. I just need to get it rendering with this quality, in the correct space.
When it's in camera space, does it take into account whether it's the outside or inside of the sphere? If my textures are wrapped over the outside of the sphere and the view is from the inside, it's not going to look the same, is it?
The issue is with your texture size: it's small and you are applying it to a larger surface. Make larger textures with more pixels.
It's confirmed that zfar and scaling have nothing to do with it.
Finally found the issue; silly mistake. The matrices need to be transposed before being uploaded to the constant buffer (HLSL packs constant-buffer matrices column-major by default, while XMMATRIX is row-major):
scb.world = XMMatrixTranspose(sworld);
scb.view = XMMatrixTranspose(sview);
scb.projection = XMMatrixTranspose(sprojection);

Framebuffer in libGDX, using glClearColor() clears whole screen

I'm using glClearColor() inside the FrameBuffer's begin() and end(),
but it is clearing the whole screen's color. Am I doing something wrong?
public class FrameBufferTest implements ApplicationListener{
OrthographicCamera camera;
SpriteBatch batcher;
FrameBuffer fbo;
Texture tex;
@Override
public void create() {
batcher = new SpriteBatch();
camera = new OrthographicCamera(800, 480);
camera.position.set(camera.viewportWidth/2f, camera.viewportHeight/2f, 0);
fbo = new FrameBuffer(Format.RGBA8888, 100, 100,false);
camera.position.set(camera.viewportWidth/2f, camera.viewportHeight/2f, 0);
tex = new Texture("data/bg.png");
}
@Override
public void render() {
GL20 gl = Gdx.graphics.getGL20();
gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
gl.glClearColor(0.5f,0.5f,0.5f,0.5f); // grey color
camera.update();
batcher.setProjectionMatrix(camera.combined);
batcher.enableBlending();
batcher.begin();
batcher.draw(tex, 0, 0, 256, 256);
batcher.end();
fbo.begin();
gl.glClearColor(1f,0,0,1f);
gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
fbo.end();
batcher.begin();
batcher.draw(fbo.getColorBufferTexture(), 512, 0, 100, 100);
batcher.end();
}
}
Swap these two lines at the beginning of your render method:
gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
gl.glClearColor(0.5f,0.5f,0.5f,0.5f); // grey color
In the order you have them, it is clearing the color using the most recently set clear color, which is the red that you set farther down in your render method (since this is a loop).
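For illustration, a sketch of render() with the ordering fixed (using the same fields as in the question):
@Override
public void render() {
    GL20 gl = Gdx.graphics.getGL20();
    // Set the clear color *before* clearing; otherwise glClear() uses whatever
    // color was set last (the red set for the FBO on the previous frame).
    gl.glClearColor(0.5f, 0.5f, 0.5f, 0.5f); // grey
    gl.glClear(GL20.GL_COLOR_BUFFER_BIT);

    // This clear only affects the FBO, because it is bound between begin() and end().
    fbo.begin();
    gl.glClearColor(1f, 0f, 0f, 1f); // red
    gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
    fbo.end();

    camera.update();
    batcher.setProjectionMatrix(camera.combined);
    batcher.begin();
    batcher.draw(tex, 0, 0, 256, 256);
    batcher.draw(fbo.getColorBufferTexture(), 512, 0, 100, 100);
    batcher.end();
}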

Rendering to texture hides the front face, something's wrong with the z-buffer?

I want to render my scene to a texture and apply a blur shader to this texture. The problem is that when I draw the texture back, the front faces of the cubes are invisible.
(Screenshots: without supersampling / with supersampling.)
Ignore the opaque thing around the cube in both photos. I double-render the cube, once with less alpha and more scale; I disabled this but I still have the same problem.
For some reason I am using y as z and z as y, so the front face of the cube has a smaller y than the back face (instead of z). I am guessing something is wrong with the z-buffer.
The render to texture code:
public class RenderOnTexture {
private float m_fboScaler = 1f;
private boolean m_fboEnabled = true;
private FrameBuffer m_fbo = null;
private TextureRegion m_fboRegion = null;
public RenderOnTexture(float scale) {
int width = (int) (Gdx.graphics.getWidth()*scale);
int height = (int) (Gdx.graphics.getHeight()*scale);
m_fbo = new FrameBuffer(Format.RGB565, (int)(width * m_fboScaler), (int)(height * m_fboScaler), false);
m_fboRegion = new TextureRegion(m_fbo.getColorBufferTexture());
m_fboRegion.flip(false,false);
}
public void begin(){
if(m_fboEnabled)
{
m_fbo.begin();
Gdx.gl.glClearColor(0, 0,0,0);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
}
}
public TextureRegion end(){
if(m_fbo != null)
{
m_fbo.end();
return m_fboRegion;
}
return null;
}
}
The boolean argument in the FrameBuffer constructor enables a depth buffer attachment, and the depth buffer must be cleared along with the color buffer.
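For example, a minimal sketch of the two changes (based on the constructor and begin() shown above):
// Pass true as the last argument so the FrameBuffer gets a depth attachment:
m_fbo = new FrameBuffer(Format.RGB565, (int)(width * m_fboScaler), (int)(height * m_fboScaler), true);

// ...and clear the depth buffer along with the color buffer inside begin():
Gdx.gl.glClearColor(0, 0, 0, 0);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);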

Libgdx - How to draw filled rectangle in the right place in scene2d?

I am using scene2d. Here is my code:
group.addActor(new Actor() {
@Override
public Actor hit(float arg0, float arg1) {
return null;
}
@Override
public void draw(SpriteBatch batch, float arg1) {
batch.end();
shapeRenderer.begin(ShapeType.FilledRectangle);
shapeRenderer.setColor(Color.RED);
shapeRenderer.filledRect(0, 0, 300, 20);
shapeRenderer.end();
batch.begin();
}
});
The problem is that it draws this rectangle relative to the screen (x = 0, y = 0), but I need it to be drawn relative to my group. But if I draw other entities with:
batch.draw(texture, 0, 0, width, height);
it correctly draws at (x = 0, y = 0) relative to my group (0,0 pixels from the bottom-left corner of the group).
Any suggestions on how I can implement shape drawing in scene2d? And can someone explain why these two calls work differently?
ShapeRenderer has its own transform matrix and projection matrix. These are separate to those in the SpriteBatch that the scene2d Stage uses. If you update the ShapeRenderer's matrices to match those that scene2d is using when Actor.draw() is called then you should get the results that you want.
As Rod Hyde mentions, ShapeRenderer has its own transform matrix and projection matrix.
So you would have to get the SpriteBatch's projection Matrix first.
I am not sure if there is an elegant way to do it, I did it like this:
public class myActor extends Actor{
private ShapeRenderer shapeRenderer;
static private boolean projectionMatrixSet;
public myActor(){
shapeRenderer = new ShapeRenderer();
projectionMatrixSet = false;
}
@Override
public void draw(SpriteBatch batch, float alpha){
batch.end();
if(!projectionMatrixSet){
// set the projection matrix only once, then remember that it has been set
shapeRenderer.setProjectionMatrix(batch.getProjectionMatrix());
projectionMatrixSet = true;
}
shapeRenderer.begin(ShapeType.Filled);
shapeRenderer.setColor(Color.RED);
shapeRenderer.rect(0, 0, 50, 50);
shapeRenderer.end();
batch.begin();
}
}
This was the best solution for me, because when you use a ShapeRenderer it doesn't react to moving/zooming the camera.
public class Rectangle extends Actor {
private Texture texture;
public Rectangle(float x, float y, float width, float height, Color color) {
createTexture((int)width, (int)height, color);
setX(x);
setY(y);
setWidth(width);
setHeight(height);
}
private void createTexture(int width, int height, Color color) {
Pixmap pixmap = new Pixmap(width, height, Pixmap.Format.RGBA8888);
pixmap.setColor(color);
pixmap.fillRectangle(0, 0, width, height);
texture = new Texture(pixmap);
pixmap.dispose();
}
@Override
public void draw(Batch batch, float parentAlpha) {
Color color = getColor();
batch.setColor(color.r, color.g, color.b, color.a * parentAlpha);
batch.draw(texture, getX(), getY(), getWidth(), getHeight());
}
}
You need to convert the Actor's local coordinates into screen coordinates. Assuming your stage is full-screen, you can just use Actor.localToStageCoordinates:
vec.set(getX(), getY());
this.localToStageCoordinates(/* in/out */ vec);
shapeRenderer.filledRect(vec.x, vec.y, getWidth(), getHeight());
Where vec is a private Vector2 (you don't want to allocate a new one on each render call).
This also assumes that your ShapeRenderer is defined to map to the full screen (which is the default).
Also, if you switch away from the ShapeRenderer and back to the SpriteBatch, note that the batch is already adjusted to Actor coordinates (and thus you can use getX() and getY() directly with batch.draw(...)).
If you are using the ShapeRenderer, don't forget to use the setProjectionMatrix() and setTransformMatrix() methods...
A sample of drawing a circle inside an Actor's draw() method:
@Override
public void draw(Batch batch, float parentAlpha) {
batch.end();
if (shapeRenderer == null) {
shapeRenderer = new ShapeRenderer();
}
Gdx.gl.glEnable(GL20.GL_BLEND);
shapeRenderer.setProjectionMatrix(batch.getProjectionMatrix());
shapeRenderer.setTransformMatrix(batch.getTransformMatrix());
shapeRenderer.setColor(mColor.r, mColor.g, mColor.b, mColor.a * parentAlpha);
shapeRenderer.begin(ShapeRenderer.ShapeType.Filled);
shapeRenderer.circle(getX() + getWidth()/2 , getY() + getHeight()/2 , Math.min(getWidth(),getHeight())/2 );
shapeRenderer.end();
Gdx.gl.glDisable(GL20.GL_BLEND);
batch.begin();
}

Rendering texture to simple shape using QGLSceneNode

I am attempting to render a texture to a simple shape using QGLBuilder and QGeometryData, but I cannot get the texture to display. What am I doing wrong?
I have created a QGLSceneNode which builds the texture and attempts to use generateTextureCoordinates to map the texture coordinates.
EDIT
Mapping the texture coordinates by hand seems to work, but generateTextureCoordinates() should work too.
[My class is a child of QGLView]
vtkQtViewer::vtkQtViewer(QWidget *parent) :
QGLView(parent), scene(new QGLSceneNode(this))
{
// in the constructor construct a builder on the stack
QGLBuilder builder;
QGeometryData triangle;
QVector3D a(2, 2, 0);
QVector3D b(-2, 2, 0);
QVector3D c(2, -2, 0);
QVector3D d(-2, -2, 0);
triangle.appendVertex(a,b,c);
triangle.appendVertex(b,d,c);
triangle.generateTextureCoordinates();
// When adding geometry,
//QGLBuilder automatically creates lighting normals
builder << triangle;
// obtain the scene from the builder
QGLSceneNode* can = builder.finalizedSceneNode();
// apply effects at app initialization time
QGLMaterial *mat = new QGLMaterial;
mat->setDiffuseColor(Qt::red);
QUrl url;
url.setPath(QLatin1String("qtlogo.png"));
url.setScheme(QLatin1String("file"));
mat->setTextureUrl(url);
can->setMaterial(mat);
scene->addNode(can);
}
vtkQtViewer::~vtkQtViewer()
{
delete scene;
}
void vtkQtViewer::initializeGL(QGLPainter *painter)
{
painter->setStandardEffect(QGL::LitDecalTexture2D);
}
void vtkQtViewer::paintGL(QGLPainter *painter)
{
scene->draw(painter);
}