How do I get textures to work in OpenGL? - c++
I'm using the tutorials on http://arcsynthesis.org/gltut/ to learn OpenGL (it's required, I have to use it). Mostly I want to apply the textures from Tutorial 15 onto the objects in Tutorial 7 (world with UBO).
So far it seems like the textures only work when mipmaps are turned on. This comes with a downside: the only mipmap level actually used is the one with index zero, which is the single-colored 1x1 pixel one. I tried setting the minimum mipmap level higher or turning off mipmaps entirely, but even that doesn't fix things, because then everything turns pitch black. Below I'll list the most important parts of my program.
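To be concrete, what I tried for "turning mipmaps off" was roughly the following (a sketch only, not my exact code; the texture object itself is created further down and is called g_checkerTexture there):
glBindTexture(GL_TEXTURE_2D, g_checkerTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0); // sample only from level 0
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); // non-mipmapped min filter
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);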
EDIT: I guess I'll add more details...
The vertex shader has something like this:
#version 330
layout(location = 0) in vec4 position;
layout(location = 1) in vec4 color;
layout(location = 2) in vec3 normal;
//Added these later
layout(location = 5) in vec2 texCoord;
out vec2 colorCoord;
smooth out vec4 interpColor;
out vec3 vertexNormal;
out vec3 modelSpacePosition;
out vec3 cameraSpacePosition;
uniform mat4 worldToCameraMatrix;
uniform mat4 modelToWorldMatrix;
uniform mat3 normalModelToCameraMatrix;
uniform vec3 dirToLight;
uniform vec4 lightIntensity;
uniform vec4 ambientIntensity;
uniform vec4 baseColor;
uniform mat4 cameraToClipMatrix;
void main()
{
vertexNormal = normal;
vec3 normCamSpace = normalize(normalModelToCameraMatrix * vertexNormal);
cameraSpacePosition = normCamSpace;
float cosAngIncidence = dot(normCamSpace, dirToLight);
cosAngIncidence = clamp(cosAngIncidence, 0, 1);
modelSpacePosition.x = position.x;
modelSpacePosition.y = position.y;
modelSpacePosition.z = position.z;
vec4 temp = modelToWorldMatrix * position;
temp = worldToCameraMatrix * temp;
gl_Position = cameraToClipMatrix * temp;
interpColor = ((lightIntensity * cosAngIncidence) + (ambientIntensity)) * baseColor;
colorCoord = texCoord;
}
The fragment shader looks like this:
#version 330
in vec3 vertexNormal;
in vec3 modelSpacePosition;
smooth in vec4 interpColor;
uniform vec3 modelSpaceLightPos;
uniform vec4 lightIntensity2;
uniform vec4 ambientIntensity2;
out vec4 outputColor;
//Added later
in vec2 colorCoord;
uniform sampler2D colorTexture;
void main()
{
vec3 lightDir2 = normalize(modelSpacePosition - modelSpaceLightPos);
float cosAngIncidence2 = dot(normalize(vertexNormal), lightDir2);
cosAngIncidence2 = clamp(cosAngIncidence2, 0, 1);
float light2DistanceSqr = dot(modelSpacePosition - modelSpaceLightPos, modelSpacePosition - modelSpaceLightPos);
//added
vec4 texture2 = texture(colorTexture, colorCoord);
outputColor = ((ambientIntensity2 + (interpColor*2))/4) +
((((interpColor) * lightIntensity2/200 * cosAngIncidence2) + (ambientIntensity2* interpColor ))
/( ( sqrt(light2DistanceSqr) + light2DistanceSqr)/200 ));
//Override outputColor with the plain texture for testing
outputColor = texture2;
}
Those are both shaders. And here are the parts added to the .cpp:
#include <glimg/glimg.h>
#include "../framework/directories.h"
[...]
const int g_colorTexUnit = 0;
GLuint g_checkerTexture = 0;
And here's the loader for the texture:
void LoadCheckerTexture()
{
try
{
std::string filename(LOCAL_FILE_DIR);
filename += "checker.dds";
std::auto_ptr<glimg::ImageSet>
pImageSet(glimg::loaders::dds::LoadFromFile(filename.c_str()));
glGenTextures(1, &g_checkerTexture);
glBindTexture(GL_TEXTURE_2D, g_checkerTexture);
glimg::SingleImage image = pImageSet->GetImage(0, 0, 0);
glimg::Dimensions dims = image.GetDimensions();
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, dims.width, dims.height, 0, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, image.GetImageData());
glBindTexture(GL_TEXTURE_2D, 0);
}
catch(std::exception &e)
{
printf("%s\n", e.what());
throw;
}
}
Naturally I've got this in void init():
LoadCheckerTexture();
And then when rendering the object:
glActiveTexture(GL_TEXTURE0 + g_colorTexUnit);
glBindTexture(GL_TEXTURE_2D,g_checkerTexture);
g_pLeftMesh->Render();
glBindSampler(g_colorTexUnit, 0);
glBindTexture(GL_TEXTURE_2D, 0);
With all of this, everything just comes out pitch black; however, when I change the outputColor assignment to "texture2 + outputColor;", everything looks normal. I have no idea what I'm doing wrong here. A friend tried to help me, and we removed some unnecessary stuff, but we couldn't get it running.
OK guys, I've kept working on this whole thing and did manage to somehow get it running. First off, I had to add samplers:
GLuint g_samplers;
//Add Later
void CreateSamplers()
{
glGenSamplers(1, &g_samplers);
glSamplerParameteri(g_samplers, GL_TEXTURE_WRAP_S, GL_REPEAT);
glSamplerParameteri(g_samplers, GL_TEXTURE_WRAP_T, GL_REPEAT);
//Nearest filtering, no mipmaps
glSamplerParameteri(g_samplers, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glSamplerParameteri(g_samplers, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
}
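As far as I understand it, the GL_NEAREST min filter (instead of the default GL_NEAREST_MIPMAP_LINEAR) is probably what cured the black textures: only mipmap level 0 gets uploaded, so with a mipmapping min filter the texture is incomplete and samples as black. A sampler object bound to the unit overrides the texture's own parameters; the per-texture equivalent, just as a sketch, would be:
glBindTexture(GL_TEXTURE_2D, g_checkerTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);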
I also added this to the texture-loading function:
glimg::OpenGLPixelTransferParams xfer = glimg::GetUploadFormatType(pImageSet->GetFormat(), 0);
glimg::SingleImage image = pImageSet->GetImage(0, 0, 0);
glimg::Dimensions dims = image.GetDimensions();
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, dims.width, dims.height, 0,
xfer.format, xfer.type, image.GetImageData());
The xfer variable picks up the pixel-transfer format and type that match the DDS file.
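If the .dds actually contains a full mipmap chain, uploading every level would be another option (a sketch only; it assumes glimg reports the number of levels via GetMipmapCount(), which I believe is what the Tutorial 15 mipmap loader uses, so check the name against your glimg version):
for(int level = 0; level < pImageSet->GetMipmapCount(); level++)
{
glimg::SingleImage image = pImageSet->GetImage(level, 0, 0);
glimg::Dimensions dims = image.GetDimensions();
glTexImage2D(GL_TEXTURE_2D, level, GL_RGBA, dims.width, dims.height, 0,
xfer.format, xfer.type, image.GetImageData());
}
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, pImageSet->GetMipmapCount() - 1);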
Also the render code got turned into this:
//Added the necessary texture and sampler bindings
glActiveTexture(GL_TEXTURE0 + g_colorTexUnit);
glBindTexture(GL_TEXTURE_2D,g_checkerTexture);
glBindSampler(g_colorTexUnit, g_samplers);
g_pLeftMesh->Render();
glBindSampler(g_colorTexUnit, 0);
glBindTexture(GL_TEXTURE_2D, 0);
And of course, at the end of init() I needed to add the CreateSamplers() call:
//Added this later
LoadCheckerTexture();
CreateSamplers();
I'm sorry for all the trouble with this, but I guess OpenGL really is just this confusing, and it was partly luck that I got it right. I'm just posting this so that people know.
Your failure to get textures working may be caused by one of the following (a minimal binding sketch follows this list):
Have you added texture coordinates to your objects? (This is the most probable cause, since you are adding textures to a non-textured tutorial.) Add the texture coordinates to the VAO.
Did you add a uniform texture unit (sampler2D)? (It must be a uniform, otherwise texturing will not work properly.)
Is your texture loaded, bound and enabled (GL_TEXTURE_2D)?
Is your active texture unit 0? If not, change the layout/multitexture coords or set the active texture unit to 0.
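Here is the minimal binding sketch mentioned above, for texture unit 0; prog, txrid and the uniform name txr_texture0 are placeholders, so substitute your own program handle, texture handle and sampler name:
glUseProgram(prog); // bind your GLSL program first
glActiveTexture(GL_TEXTURE0); // select texture unit 0
glBindTexture(GL_TEXTURE_2D, txrid); // bind the loaded texture to that unit
glUniform1i(glGetUniformLocation(prog, "txr_texture0"), 0); // the sampler2D uniform holds the unit number, not the texture id
// ... render your mesh here ...
glBindTexture(GL_TEXTURE_2D, 0);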
These two shaders do simple texturing (texture unit 0) with nothing special (no lighting, blending, bump mapping, ...):
tm_l2g is the transformation from local object space to world space (modelview)
tm_g2s is the transformation from world space to screen space (projection)
pos are vertex coordinates
txr are texture coordinates
col are colors
Do not forget to change the uniform names and layout locations to match yours.
Vertex:
//------------------------------------------------------------------
#version 420 core
//------------------------------------------------------------------
uniform mat4x4 tm_l2g;
uniform mat4x4 tm_g2s;
layout(location=0) in vec3 pos;
layout(location=1) in vec4 col;
layout(location=2) in vec2 txr;
out smooth vec4 pixel_col;
out smooth vec2 pixel_txr;
//------------------------------------------------------------------
void main(void)
{
vec4 p;
p.xyz=pos;
p.w=1.0;
p=tm_l2g*p;
p=tm_g2s*p;
gl_Position=p;
pixel_col=col;
pixel_txr=txr;
}
//------------------------------------------------------------------
Fragment:
//------------------------------------------------------------------
#version 420 core
//------------------------------------------------------------------
in smooth vec4 pixel_col;
in smooth vec2 pixel_txr;
uniform sampler2D txr_texture0;
out layout(location=0) vec4 frag_col;
//------------------------------------------------------------------
void main(void)
{
vec4 col;
col=texture(txr_texture0,pixel_txr.st);
frag_col=col*pixel_col;
}
//------------------------------------------------------------------
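A hedged usage sketch for these two shaders (the uniform names match the shaders above; prog, txrid and the two matrix arrays are placeholders for handles and data you already have):
glUseProgram(prog);
glUniformMatrix4fv(glGetUniformLocation(prog, "tm_l2g"), 1, GL_FALSE, modelview_matrix); // local -> world, column-major float[16]
glUniformMatrix4fv(glGetUniformLocation(prog, "tm_g2s"), 1, GL_FALSE, projection_matrix); // world -> screen
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, txrid);
glUniform1i(glGetUniformLocation(prog, "txr_texture0"), 0); // texture unit 0
// draw your geometry here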
[edit1] CPU old-style OpenGL render code (initializations are not included, this is only the render code; they can be found here)
//------------------------------------------------------------------
// set modelview,projection,textures,bind GLSL programs...
GLfloat a=10.0,z=0.0;
glColor3f(1.0,1.0,1.0);
glBegin(GL_QUADS);
// textured quad
glTexCoord2f(0.0,0.0); glVertex3f(-a,-a,z);
glTexCoord2f(0.0,1.0); glVertex3f(-a,+a,z);
glTexCoord2f(1.0,1.0); glVertex3f(+a,+a,z);
glTexCoord2f(1.0,0.0); glVertex3f(+a,-a,z);
// reverse-order quad, to be sure that at least one of them passes the CULL_FACE test
glTexCoord2f(1.0,0.0); glVertex3f(+a,-a,z);
glTexCoord2f(1.0,1.0); glVertex3f(+a,+a,z);
glTexCoord2f(0.0,1.0); glVertex3f(-a,+a,z);
glTexCoord2f(0.0,0.0); glVertex3f(-a,-a,z);
glEnd();
//------------------------------------------------------------------
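If that snippet is run on the fixed-function pipeline (no shader program bound), texturing also has to be enabled and the texture bound before the quads; a sketch under that assumption, with txrid again standing in for your texture handle:
glEnable(GL_TEXTURE_2D); // fixed-function texturing must be enabled explicitly
glBindTexture(GL_TEXTURE_2D, txrid);
// ... the glBegin(GL_QUADS)/glEnd() block from above ...
glDisable(GL_TEXTURE_2D);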
[edit2] OK, here goes the VAO/VBO render code...
//------------------------------------------------------------------------------
// enum of VBO locations (these are also your layout locations); I use enums to make in-code changes simple
enum _vbo_enum
{
_vbo_pos=0, // glVertex
_vbo_col, // glColor
_vbo_tan, // glNormal
_vbo_unused0, // unused (at least I don't see anything at this location in your code)
_vbo_unused1, // unused (at least I don't see anything at this location in your code)
_vbo_txr, // glTexCoord
_vbos
};
//------------------------------------------------------------------------------
// 'global' names and size for OpenGL mesh in VAO/VBO ... similar to texture names/handles
GLuint vao[1],vbo[_vbos],num_pnt=0;
//------------------------------------------------------------------------------
void VAO_init_cube() // call this before VAO use,...but after OpenGL init !
{
//[1] first you need some model to render (mesh), here is a simple cube
// size,position of cube - change it that it is visible in your scene
const GLfloat a=1.0,x=0.0,y=0.0,z=0.0;
// cube points 3f x,y,z
GLfloat mesh_pos[]=
{
x-a,y-a,z-a,x-a,y+a,z-a,x+a,y+a,z-a,x+a,y-a,z-a,
x-a,y-a,z+a,x-a,y+a,z+a,x+a,y+a,z+a,x+a,y-a,z+a,
x-a,y-a,z-a,x-a,y-a,z+a,x+a,y-a,z+a,x+a,y-a,z-a,
x-a,y+a,z-a,x-a,y+a,z+a,x+a,y+a,z+a,x+a,y+a,z-a,
x-a,y-a,z-a,x-a,y+a,z-a,x-a,y+a,z+a,x-a,y-a,z+a,
x+a,y-a,z-a,x+a,y+a,z-a,x+a,y+a,z+a,x+a,y-a,z+a,
};
// cube colors 3f r,g,b
GLfloat mesh_col[]=
{
0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,
0.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,
0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,
0.0,1.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,
1.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,1.0,
};
// cube normals 3f x,y,z
GLfloat mesh_tan[]=
{
-0.6,-0.6,-0.6,-0.6,+0.6,-0.6,+0.6,+0.6,-0.6,+0.6,-0.6,-0.6,
-0.6,-0.6,+0.6,-0.6,+0.6,+0.6,+0.6,+0.6,+0.6,+0.6,-0.6,+0.6,
-0.6,-0.6,-0.6,-0.6,-0.6,+0.6,+0.6,-0.6,+0.6,+0.6,-0.6,-0.6,
-0.6,+0.6,-0.6,-0.6,+0.6,+0.6,+0.6,+0.6,+0.6,+0.6,+0.6,-0.6,
-0.6,-0.6,-0.6,-0.6,+0.6,-0.6,-0.6,+0.6,+0.6,-0.6,-0.6,+0.6,
+0.6,-0.6,-0.6,+0.6,+0.6,-0.6,+0.6,+0.6,+0.6,+0.6,-0.6,+0.6,
};
// cube texture coords 2f s,t
GLfloat mesh_txr[]=
{
0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,
};
// init VAO/VBO
glGenVertexArrays(1,vao); // allocate 1 x VAO
glGenBuffers(_vbos,vbo); // allocate _vbos x VBO
// copy mesh to VAO/VBO ... after this you do not need the mesh anymore
GLint i,sz,n; // n = number of numbers per 1 entry
glBindVertexArray(vao[0]);
num_pnt=sizeof(mesh_pos)/(sizeof(GLfloat)*3); // num of all points in mesh
i=_vbo_pos; n=3; sz=sizeof(GLfloat)*n;
glBindBuffer(GL_ARRAY_BUFFER,vbo[i]);
glBufferData(GL_ARRAY_BUFFER,sz*num_pnt,mesh_pos,GL_STATIC_DRAW);
glEnableVertexAttribArray(i);
glVertexAttribPointer(i,n,GL_FLOAT,GL_FALSE,0,0);
i=_vbo_col; n=3; sz=sizeof(GLfloat)*n;
glBindBuffer(GL_ARRAY_BUFFER,vbo[i]);
glBufferData(GL_ARRAY_BUFFER,sz*num_pnt,mesh_col,GL_STATIC_DRAW);
glEnableVertexAttribArray(i);
glVertexAttribPointer(i,n,GL_FLOAT,GL_FALSE,0,0);
i=_vbo_tan; n=3; sz=sizeof(GLfloat)*n;
glBindBuffer(GL_ARRAY_BUFFER,vbo[i]);
glBufferData(GL_ARRAY_BUFFER,sz*num_pnt,mesh_tan,GL_STATIC_DRAW);
glEnableVertexAttribArray(i);
glVertexAttribPointer(i,n,GL_FLOAT,GL_FALSE,0,0);
i=_vbo_txr; n=2; sz=sizeof(GLfloat)*n;
glBindBuffer(GL_ARRAY_BUFFER,vbo[i]);
glBufferData(GL_ARRAY_BUFFER,sz*num_pnt,mesh_txr,GL_STATIC_DRAW);
glEnableVertexAttribArray(i);
glVertexAttribPointer(i,n,GL_FLOAT,GL_FALSE,0,0);
glBindVertexArray(0);
}
//------------------------------------------------------------------------------
void VAO_draw() // call this to draw your mesh; enable and bind your textures before calling it
{
glDisable(GL_CULL_FACE);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);
glBindVertexArray(vao[0]);
glEnableVertexAttribArray(_vbo_pos);
glEnableVertexAttribArray(_vbo_col);
glEnableVertexAttribArray(_vbo_tan);
glDisableVertexAttribArray(_vbo_unused0);
glEnableVertexAttribArray(_vbo_txr);
glDrawArrays(GL_QUADS,0,num_pnt);
glDisableVertexAttribArray(_vbo_pos);
glDisableVertexAttribArray(_vbo_col);
glDisableVertexAttribArray(_vbo_tan);
glDisableVertexAttribArray(_vbo_unused0);
glDisableVertexAttribArray(_vbo_unused1);
glDisableVertexAttribArray(_vbo_txr);
glBindVertexArray(0);
}
//------------------------------------------------------------------------------
void VAO_exit() // clean up ... call this when you do not need VAO/VBO anymore
{
glDisableVertexAttribArray(_vbo_pos);
glDisableVertexAttribArray(_vbo_col);
glDisableVertexAttribArray(_vbo_tan);
glDisableVertexAttribArray(_vbo_unused0);
glDisableVertexAttribArray(_vbo_unused1);
glDisableVertexAttribArray(_vbo_txr);
glBindVertexArray(0);
glDeleteVertexArrays(1,vao);
glDeleteBuffers(_vbos,vbo);
}
//------------------------------------------------------------------------------
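A short call-order sketch for the three functions above (the program and texture binding is the same as in the earlier snippets; prog and txrid are placeholders):
// once, after the OpenGL context and the GLSL program are created:
VAO_init_cube();
// every frame:
glUseProgram(prog);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, txrid);
VAO_draw();
// once, at shutdown:
VAO_exit();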
[edit3] If you are a Win32/64 user, you can try my IDE for GLSL.
It is very simple and easy to use, but it cannot change texture/attrib locations. Press [F1] for help, [F9] to run, and [F10] to return to normal OpenGL mode. The text editor is also a little buggy sometimes, but it is enough for my purposes.
GLSL IDE
Related
Sample from GL_TEXTURE_1D in fragment shader
Been trying to sample from a 1D texture (.png), got a model with the correct texture coordinates and all but I just can't get the texture to show up. The geometry is rendering just black, there must be something I have missunderstood about textures in OpenGL but can't see it. Any pointers? C++ // Setup GLint texCoordAttrib = glGetAttribLocation(batch_shader_program, "vTexCoord"); glVertexAttribPointer(texCoordAttrib, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex<float>), (const void *)offsetof(Vertex<float>, texCoord)); glEnableVertexAttribArray(texCoordAttrib); // Loading GLuint load_1d_texture(std::string filepath) { SDL_Surface *image = IMG_Load(filepath.c_str()); int width = image->w; GLuint texture; glGenTextures(1, &texture); glBindTexture(GL_TEXTURE_1D, texture); glTexImage2D(GL_TEXTURE_1D, 0, GL_RGBA, width, 0, GL_RGBA, GL_UNSIGNED_BYTE, image->pixels); SDL_FreeSurface(image); return texture; } // Rendering glUseProgram(batch.gl_program); glBindTexture(GL_TEXTURE_1D, batch.mesh.texture.gl_texture_reference); glDraw*** Vertex Shader #version 330 core in vec3 position; in vec4 vColor; in vec3 normal; // Polygon normal in vec2 vTexCoord; // Model in mat4 model; out vec4 fColor; out vec3 fTexcoord; // View or a.k.a camera matrix uniform mat4 camera_view; // Projection or a.k.a perspective matrix uniform mat4 projection; void main() { gl_Position = projection * camera_view * model * vec4(position, 1.0); fTexcoord = vec3(vTexCoord, 1.0); } Fragment Shader #version 330 core in vec4 fColor; out vec4 outColor; in vec3 fTexcoord; // passthrough shading for interpolated textures uniform sampler1D sampler; void main() { outColor = texture(sampler, fTexcoord.x); }
glBindTexture(GL_TEXTURE_2D, texture);
glBindTexture(GL_TEXTURE_1D, batch.mesh.texture.gl_texture_reference);
Assuming that these two lines of code are talking about the same OpenGL object, you cannot do that. A texture that uses the 2D texture target is a 2D texture. It is not a 1D texture, nor is it a 2D array texture with one layer or a 3D texture with depth 1. It is a 2D texture. Once you bind a texture object after generating it, the texture's target is fixed. You can use view textures to create a view of the same storage with different targets, but the original texture object itself is unaffected by this. And you can't create a 1D view of a 2D texture. You should have gotten a GL_INVALID_OPERATION error when you tried to bind the 2D texture as if it were 1D. You should always check for errors when you run into OpenGL problems.
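As a small illustration of that last point, a drop-in error check you can place after any suspect call (glGetError keeps returning queued errors until it reports GL_NO_ERROR; needs <cstdio> for fprintf):
for(GLenum err; (err = glGetError()) != GL_NO_ERROR; )
fprintf(stderr, "GL error: 0x%04X\n", err);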
In the end there was no problem, only a bug in the texture coordinate loading (it took the wrong indices from the vertices).
Simple GLSL render chain doesn't draw reliably
I have a simple compositing system which is supposed to render different textures and a background texture into an FBO. It also renders some primitives. Here's an example: I'm rendering using a simple GLSL shader for the texture and another one for the primitive. Also, I'm waiting for each shader to finish using glFinish after each glDrawArrays call. So basically: tex shader (background tex) tex shader (tex 1) primitive shader tex shader (tex 2) tex shader (tex 3) When I only do this once, it works. But if I do another render pass directly after the first one finished, some textures just aren't rendered. The primitive however is always rendered. This doesn't happen always, but the more textures I draw, the more often this occurs. Thus, I'm assuming that this is a timing problem. I tried to troubleshoot for the last two days and I just can't find the reason for this. I'm 100% sure that the textures are always valid (I downloaded them using glGetTexImage to verify). Here are my texture shaders. Vertex shader: #version 150 uniform mat4 mvp; in vec2 inPosition; in vec2 inTexCoord; out vec2 texCoordV; void main(void) { texCoordV = inTexCoord; gl_Position = mvp * vec4(inPosition, 0.0, 1.0); } Fragment shader: #version 150 uniform sampler2D tex; in vec2 texCoordV; out vec4 fragColor; void main(void) { fragColor = texture(tex, texCoordV); } And here's my invocation: NSRect drawDestRect = NSMakeRect(xPos, yPos, str.texSize.width, str.texSize.height); NLA_VertexRect rect = NLA_VertexRectFromNSRect(drawDestRect); int texID = 0; NLA_VertexRect texCoords = NLA_VertexRectFromNSRect(NSMakeRect(0.0f, 0.0f, 1.0f, 1.0f)); NLA_VertexRectFlipY(&texCoords); [self.texApplyShader.arguments[#"inTexCoord"] setValue:&texCoords forNumberOfVertices:4]; [self.texApplyShader.arguments[#"inPosition"] setValue:&rect forNumberOfVertices:4]; [self.texApplyShader.arguments[#"tex"] setValue:&texID forNumberOfVertices:1]; GetError(); glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D, str.texName); glDrawArrays(GL_TRIANGLE_FAN, 0, 4); glFinish(); The setValue:forNumberOfCoordinates: function is an object-based wrapper around OpenGL's parameter application functions. It basically does this: glBindVertexArray(_vertexArrayObject); glBindBuffer(GL_ARRAY_BUFFER, _vertexBufferObject); glBufferData(GL_ARRAY_BUFFER, bytesForGLType * numVertices, value, GL_DYNAMIC_DRAW); glEnableVertexAttribArray((GLuint)self.boundLocation); glVertexAttribPointer((GLuint)self.boundLocation, numVectorElementsForType, GL_FLOAT, GL_FALSE, 0, 0); Here are two screenshots of what it should look like (taken after first render pass) and what it actually looks like (taken after second render pass): https://www.dropbox.com/s/0nmquelzo83ekf6/GLRendering_issues_correct.png?dl=0 https://www.dropbox.com/s/7aztfba5mbeq5sj/GLRendering_issues_wrong.png?dl=0 (in this example, the background texture is just black) The primitive shader is as simple as it gets: Vertex: #version 150 uniform mat4 mvp; uniform vec4 inColor; in vec2 inPosition; out vec4 colorV; void main (void) { colorV = inColor; gl_Position = mvp * vec4(inPosition, 0.0, 1.0); } Fragment: #version 150 in vec4 colorV; out vec4 fragColor; void main(void) { fragColor = colorV; }
Found the issue... I didn't realize that the FBO is already drawn to the screen after the first render pass. This happens on a different thread and wasn't locked properly. Apparently the context was switched while the compositing took place, which explains why it caused different issues randomly, depending on when the second thread switched the context.
OpenGL - displacement vertex shader
I'm working with OpenTK wrapper and C# and trying to use displacement vertex shaders to generate 3D models. I can run dummie shaders to render cubes and triangles, but now I want to create a 3D grid using texture data. For first attempts I created an image (.png) with different areas using red and black colors. For reference, here is the texture-loading function: loadImage(Bitmap image) { int texID = GL.GenTexture(); GL.BindTexture(TextureTarget.Texture2D, texID); System.Drawing.Imaging.BitmapData data = image.LockBits(new System.Drawing.Rectangle(0, 0, image.Width, image.Height), System.Drawing.Imaging.ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb); GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba, data.Width, data.Height, 0, OpenTK.Graphics.OpenGL.PixelFormat.Bgra, PixelType.UnsignedByte, data.Scan0); image.UnlockBits(data); GL.GenerateMipmap(GenerateMipmapTarget.Texture2D); return texID; } As far as I read in documentation after loading the texture, I bind both arrays (vertex position and texcoords), and call GL.UseProgram. I assume then the texture is binded and loaded, isn't it? GL.ActiveTexture(TextureUnit.Texture0); GL.BindTexture(TextureTarget.Texture2D, objects[0].TextureID); int loc = GL.GetUniformLocation(shaders[activeShader].ProgramID, "maintexture"); GL.Uniform1(loc, 0); GL.UniformMatrix4(shaders[activeShader].GetUniform("modelview"), false, ref objects[0].ModelViewProjectionMatrix); vertex shader: #version 330 in vec3 vPosition; in vec2 texcoord; out vec2 f_texcoord; uniform mat4 modelview; uniform sampler2D maintexture; void main() { vec3 newPos = vPosition; newPos.y += texture(maintexture, texcoord).r; gl_Position = modelview * (vec4(newPos, 1.0) ); f_texcoord = texcoord; } What I'm trying to achieve is that the red areas in the input texture appear as elevated vertices, and black areas produce vertices at 'ground' level, but I'm getting a perfectly flat grid and I can't understand why.
GLSL How to show normals with Geometry shader?
I have vertex shader #version 330 core layout(location = 0) in vec3 VertexPosition; layout(location = 1) in vec2 VertexUV; layout(location = 2) in vec3 VertexNormal; out VS_GS_VERTEX { vec2 UV; vec3 vs_worldpos; vec3 vs_normal; } vertex_out; uniform mat4 proj_matrix; uniform mat4 model_matrix; void main(void) { gl_Normal = VertexNormal; gl_Position = proj_matrix * vec4(VertexPosition, 1.0); vertex_out.UV = VertexUV; //VertexPosition.xy; vertex_out.vs_worldpos = gl_Position.xyz; vertex_out.vs_normal = mat3(model_matrix) * gl_Normal; } and fragment shader #version 330 core in GS_FS_VERTEX { vec2 UV; vec3 vs_worldpos; vec3 vs_normal; } vertex_in; // Values that stay constant for the whole mesh. uniform sampler2D sampler0; uniform sampler2D sampler1; uniform sampler2D sampler2; uniform sampler2D sampler3; //uniform sampler2D alphamap0; uniform sampler2D alphamap1; uniform sampler2D alphamap2; uniform sampler2D alphamap3; uniform int tex_count; uniform vec4 color_ambient = vec4(0.75, 0.75, 0.75, 1.0); uniform vec4 color_diffuse = vec4(0.25, 0.25, 0.25, 1.0); //uniform vec4 color_specular = vec4(1.0, 1.0, 1.0, 1.0); uniform vec4 color_specular = vec4(0.1, 0.1, 0.1, 0.25); uniform float shininess = 5.0f; uniform vec3 light_position = vec3(12.0f, 32.0f, 560.0f); void main(){ vec3 light_direction = normalize(light_position - vertex_in.vs_worldpos); vec3 normal = normalize(vertex_in.vs_normal); vec3 half_vector = normalize(light_direction + normalize(vertex_in.vs_worldpos)); float diffuse = max(0.0, dot(normal, light_direction)); float specular = pow(max(0.0, dot(vertex_in.vs_normal, half_vector)), shininess); gl_FragColor = texture( sampler0, vertex_in.UV ) * color_ambient + diffuse * color_diffuse + specular * color_specular; // http://www.opengl.org/wiki/Texture_Combiners // GL_MODULATE = * // GL_INTERPOLATE Blend tex0 and tex1 based on a blending factor = mix(texel0, texel1, BlendFactor) // GL_INTERPOLATE Blend tex0 and tex1 based on alpha of tex0 = mix(texel0, texel1, texel0.a) // GL_ADD = clamp(texel0 + texel1, 0.0, 1.0) if (tex_count > 0){ vec4 temp = texture( sampler1, vertex_in.UV ); vec4 amap = texture( alphamap1, vertex_in.UV); gl_FragColor = mix(gl_FragColor, temp, amap.a); } if (tex_count > 1){ vec4 temp = texture( sampler2, vertex_in.UV ); vec4 amap = texture( alphamap2, vertex_in.UV); gl_FragColor = mix(gl_FragColor, temp, amap.a); } if (tex_count > 2){ vec4 temp = texture( sampler3, vertex_in.UV ); vec4 amap = texture( alphamap3, vertex_in.UV); gl_FragColor = mix(gl_FragColor, temp, amap.a); } } It takes indexed GL_TRIANGLE_STRIP as input glBindBuffer(GL_ARRAY_BUFFER, tMt.vertex_buf_id[cx, cy]); glVertexAttribPointer(VERTEX_LAYOUT_POSITION, 3, GL_FLOAT, false, 0, pointer(0)); glEnableVertexAttribArray(0); { chunk tex position } glBindBuffer(GL_ARRAY_BUFFER, chunkTexPositionBO); glVertexAttribPointer(VERTEX_LAYOUT_TEX_UV, 2, GL_FLOAT, false, 0, pointer(0)); glEnableVertexAttribArray(1); glBindBuffer(GL_ARRAY_BUFFER, tMt.normal_buf_id[cx, cy]); glVertexAttribPointer(VERTEX_LAYOUT_NORMAL, 3, GL_FLOAT, true, 0, pointer(0)); glEnableVertexAttribArray(2); { index buffer } glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, chunkIndexBO); for i := 0 to tMt.texCount - 1 do begin bt := tMt.texture_buf_id[cx, cy][i]; if bt = nil then break; glUniform1i(proj_tex_count_loc, i); glActiveTexture(GL_TEXTURE0 + i); glBindTexture(GL_TEXTURE_2D, bt.id); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); if i > 0 then begin // this 
time, use blending: glActiveTexture(GL_TEXTURE4 + 1); glBindTexture(GL_TEXTURE_2D, tMt.alphamaps[cx, cy][i - 1]); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); end; end; glDrawElements(GL_TRIANGLE_STRIP, length(chunkIndexArr), GL_UNSIGNED_SHORT, nil); Code works as intended except I'm not sure is my normals arranged properly: they was stored as bytes (converted to GLfloat as b / FF), coordinates xyz changed and some probably need negation. Can someone show me geometry shader to show normals as lines as shown at http://blogs.agi.com/insight3d/index.php/2008/10/23/geometry-shader-for-debugging-normals/ (those shader not works at all and it seems out/in data losed between vertex and fragment shader). P.S. I'm not sure I did everything properly (starting OpenGL and GLSL) so any suggestions also appreciated. Edit: I made simple geometry shader by examples // This is a very simple pass-through geometry shader #version 330 core layout (triangles) in; layout (triangle_strip, max_vertices = 145) out; in VS_GS_VERTEX { vec2 UV; vec3 vs_worldpos; vec3 vs_normal; } vertex_in[]; out GS_FS_VERTEX { vec2 UV; vec3 vs_worldpos; vec3 vs_normal; } vertex_out; uniform float uNormalsLength = 0.5; void main() { int i; // Loop over the input vertices for (i = 0; i < gl_in.length(); i++) { vertex_out.UV = vertex_in[i].UV; vertex_out.vs_worldpos = vertex_in[i].vs_worldpos; vertex_out.vs_normal = vertex_in[i].vs_normal; // Copy the input position to the output gl_Position = gl_PositionIn[i]; EmitVertex(); gl_Position = gl_ModelViewProjectionMatrix * (gl_PositionIn[i] + (vec4(vertex_in[i].vs_normal, 0) * uNormalsLength)); gl_FrontColor = vec4(0.0, 0.0, 0.0, 1.0); //gl_FrontColorIn[i]; EmitVertex(); } // End the primitive. This is not strictly necessary // and is only here for illustrative purposes. EndPrimitive(); } but I don't knwo where it takes gl_ModelViewProjectionMatrix (seems deprecated) and result looks awful, it seems everything including normals stripped. Picture in glPolygonMode(GL_FRONT, GL_LINE) mode, textures also trying to map onto those.
As it seems, you're doing it all in a single pass and you actually emit 6 vertices per incoming triangle. This is not what you want. Either do it in two passes, i.e. one pass for the mesh, the other for the normals, or try to emit the original triangle and a degenerate triangle for the normal. For simplicity I'd go for the two-pass version: Inside your render loop: render terrain if and only if debug geometry is to be rendered enable your debug normals shader render the terrain mesh a second time, passing POINTS to the vertex shader To make this work, you'll need a second program object that is made up like in the blog post you previously linked to, consisting of a simple pass trough vertex shader, the following geometry shader and a fragment shader for coloring the lines representing the normals. The vertex and fragment shaders should be no problem. Assuming you have a smoothed mesh, i.e. you have actual, averaged vertex normals, you can simply pass in points and emit lines. #version 330 core // assuming you have vertex normals, you need to render a vertex // only a single time. with any other prim type, you may render // the same normal multiple times layout (points) in; // Geometry shaders can only output points, line strips or triangle // strips by definition. you output a single line per vertex. therefore, // the maximum number of vertices per line_strip is 2. This is effectively // the same as rendering distinct line segments. layout (line_strip, max_vertices = 2) out; in vec3 vs_normal[]; uniform float normal_scale = 0.5; // don't forget: this is the default value! /* if you're never going to change the normal_scale, consider simply putting a constant there instead: const float normal_scale = 0.5; */ void main() { // we simply transform and emit the incoming vertex - this is v0 of our // line segment vec4 v0 = gl_in[0].gl_Position; gl_Position = gl_ModelViewProjectionMatrix * v0; EmitVertex(); // we calculate v1 of our line segment vec4 v1 = v0 + vec4(vs_normal[0] * normal_scale, 0); gl_Position = gl_ModelViewProjectionMatrix * v1; EmitVertex(); EndPrimitive(); } Warning: Untested code! This is probably as simple as it gets. Add a uniform to your fragment shader so you can color your normals as you like or simply export a constant color. Note: This code still uses gl_ModevelViewProjectionMatrix. If you're writing GL core code, please consider replacing legacy GL constructs, like the matrix stack, with your own stuff! Note 2: Your geometry shader is not what is usually referred to as a pass through shader. First, you do processing on the incoming data that is more than just assigning incoming values to outgoing values. Second, how can it be a pass-through shader, if you generate geometry? Pass-through means, you don't do anything else than pass incoming values to the next shader stage.
Why can't access the G-Buffer from my lighting shader?
I implemented a new rendering pipeline in my engine and rendering is broken now. When I directly draw a texture of the G-Buffer to screen, it shows up correctly. So the G-Buffer is fine. But somehow the lighting pass makes trouble. Even if I don't use the resulting texture of it but try to display albedo from G-Buffer after the lighting pass, it shows a solid gray color. I can't explain this behavior and the strange thing is that there are no OpenGL errors at any point. Vertex Shader to draw a fullscreen quad. #version 330 in vec4 vertex; out vec2 coord; void main() { coord = vertex.xy; gl_Position = vertex * 2.0 - 1.0; } Fragment Shader for lighting. #version 330 in vec2 coord; out vec3 image; uniform int type = 0; uniform sampler2D positions; uniform sampler2D normals; uniform vec3 light; uniform vec3 color; uniform float radius; uniform float intensity = 1.0; void main() { if(type == 0) // directional light { vec3 normal = texture2D(normals, coord).xyz; float fraction = max(dot(normalize(light), normal) / 2.0 + 0.5, 0); image = intensity * color * fraction; } else if(type == 1) // point light { vec3 pixel = texture2D(positions, coord).xyz; vec3 normal = texture2D(normals, coord).xyz; float dist = max(distance(pixel, light), 1); float magnitude = 1 / pow(dist / radius + 1, 2); float cutoff = 0.4; float attenuation = clamp((magnitude - cutoff) / (1 - cutoff), 0, 1); float fraction = clamp(dot(normalize(light - pixel), normal), -1, 1); image = intensity * color * attenuation * max(fraction, 0.2); } } Targets and samplers for the lighting pass. Texture ids are mapped to attachment respectively shader location. unordered_map<GLenum, GLuint> targets; targets.insert(make_pair(GL_COLOR_ATTACHMENT2, ...)); // light targets.insert(make_pair(GL_DEPTH_STENCIL_ATTACHMENT, ...)); // depth and stencil unordered_map<string, GLuint> samplers; samplers.insert(make_pair("positions", ...)); // positions from G-Buffer samplers.insert(make_pair("normals", ...)); // normals from G-Buffer Draw function for lighting pass. void DrawLights(unordered_map<string, GLuint> Samplers, GLuint Program) { auto lis = Entity->Get<Light>(); glClear(GL_COLOR_BUFFER_BIT); glEnable(GL_BLEND); glBlendFunc(GL_ONE, GL_ONE); glUseProgram(Program); int n = 0; for(auto i : Samplers) { glActiveTexture(GL_TEXTURE0 + n); glBindTexture(GL_TEXTURE_2D, i.second); glUniform1i(glGetUniformLocation(Program, i.first.c_str()), n); n++; } mat4 view = Entity->Get<Camera>(*Global->Get<unsigned int>("camera"))->View; for(auto i : lis) { int type = i.second->Type == Light::DIRECTIONAL ? 0 : 1; vec3 pos = vec3(view * vec4(Entity->Get<Form>(i.first)->Position(), !type ? 0 : 1)); glUniform1i(glGetUniformLocation(Program, "type"), type); glUniform3f(glGetUniformLocation(Program, "light"), pos.x, pos.y, pos.z); glUniform3f(glGetUniformLocation(Program, "color"), i.second->Color.x, i.second->Color.y, i.second->Color.z); glUniform1f(glGetUniformLocation(Program, "radius"), i.second->Radius); glUniform1f(glGetUniformLocation(Program, "intensity"), i.second->Intensity); glBegin(GL_QUADS); glVertex2i(0, 0); glVertex2i(1, 0); glVertex2i(1, 1); glVertex2i(0, 1); glEnd(); } glDisable(GL_BLEND); glActiveTexture(GL_TEXTURE0); glBindBuffer(GL_ARRAY_BUFFER, 0); glBindTexture(GL_TEXTURE_2D, 0); }
I found the error and it was such a stupid one. The old rendering pipeline bound the correct framebuffer before calling the draw function of each pass, but the new one didn't, so each draw function had to do that itself. I therefore wanted to update all the draw functions, but I missed the one for the lighting pass. As a result, the framebuffer of the G-Buffer was still bound and the lighting pass overwrote its targets. Thanks to you guys; you had no chance to find that error, since I hadn't posted my complete pipeline system.