What I want to achieve is to render many small quads with the OpenGL function glDrawArraysInstanced, with the same spacing between them. For example, please refer to the following image:
The code is as follows:
void OpenGLShowVideo::displayBySmallMatrix()
{
    // Now use QOpenGLExtraFunctions instead of QOpenGLFunctions as we want to
    // do more than what GL(ES) 2.0 offers.
    QOpenGLExtraFunctions *f = QOpenGLContext::currentContext()->extraFunctions();
    f->glClearColor(9.f/255.0f, 14.f/255.0f, 15.f/255.0f, 1);
    glClear(GL_COLOR_BUFFER_BIT);
    f->glViewport(0, 0, this->width(), this->height());

    m_displayByMatrixProgram->bind();
    f->glActiveTexture(GL_TEXTURE0 + m_acRenderToScreenTexUnit);
    f->glBindTexture(GL_TEXTURE_2D, m_renderWithMaskFbo->texture());

    if (m_uniformsDirty) {
        m_uniformsDirty = false;
        m_displayByMatrixProgram->setUniformValue(m_samplerLoc, m_acRenderToScreenTexUnit);

        m_proj.setToIdentity();
        m_proj.perspective(INIT_VERTICAL_ANGLE, float(this->width()) / float(this->height()), m_fNearPlane, m_fFarPlane);
        m_displayByMatrixProgram->setUniformValue(m_projMatrixLoc, m_proj);

        QMatrix4x4 camera;
        camera.lookAt(m_eye, m_eye + m_target, QVector3D(0, 1, 0));
        m_displayByMatrixProgram->setUniformValue(m_camMatrixLoc, camera);

        m_world.setToIdentity();
        float fOffsetZ = m_fVerticalAngle / INIT_VERTICAL_ANGLE;
        m_world.translate(m_fMatrixOffsetX, m_fMatrixOffsetY, fOffsetZ);
        m_proj.scale(MATRIX_INIT_SCALE_X, MATRIX_INIT_SCALE_Y, 1.0f);
        m_world.rotate(180, 1, 0, 0);

        QMatrix4x4 wm = m_world;
        m_displayByMatrixProgram->setUniformValue(m_worldMatrixLoc, wm);

        QMatrix4x4 mm;
        mm.setToIdentity();
        m_displayByMatrixProgram->setUniformValue(m_myMatrixLoc, mm);

        m_displayByMatrixProgram->setUniformValue(m_lightPosLoc, QVector3D(0, 0, 70));

        QSize tmpSize = QSize(m_viewPortWidth, m_viewPortHeight);
        m_displayByMatrixProgram->setUniformValue(m_resolutionLoc, tmpSize);

        int whRatioVal = m_viewPortWidth / m_viewPortHeight;
        m_displayByMatrixProgram->setUniformValue(m_whRatioLoc, whRatioVal);
    }

    m_geometries->bindBufferForArraysInstancedDraw();
    f->glDrawArraysInstanced(GL_TRIANGLE_STRIP, 0, 4, m_viewPortWidth * m_viewPortHeight);
}
And the vertex shader code is as follows:
#version 330
layout(location = 0) in vec4 vertex;
out vec3 color;
uniform mat4 mvp_matrix;
uniform mat4 projMatrix;
uniform mat4 camMatrix;
uniform mat4 worldMatrix;
uniform mat4 myMatrix;
uniform vec2 viewResolution;
uniform int whRatio;
uniform sampler2D sampler;
void main() {
    int posX = gl_InstanceID % int(viewResolution.x);
    int posY = gl_InstanceID / int(viewResolution.y);
    if (posY % whRatio < whRatio) {
        posY = gl_InstanceID / int(viewResolution.x);
    }
    ivec2 pos = ivec2(posX, posY);
    vec2 t = vec2(pos.x * 3.0, pos.y * 3.0);
    mat4 wm = mat4(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, t.x, t.y, 1, 1) * worldMatrix;
    color = texelFetch(sampler, pos, 0).rgb;
    gl_Position = projMatrix * camMatrix * wm * vertex;
}
And the fragment shader is as follows:
#version 330 core
in vec3 color;
out vec4 fragColor;
void main() {
fragColor = vec4(color, 1.0);
}
However, when I move the camera farther from the screen (by changing the m_eye value passed to camera.lookAt(m_eye, m_eye + m_target, QVector3D(0, 1, 0))), I get something like this:
The spacing between the quads becomes uneven, and the quad sizes also vary. But when I move the camera closer to the screen, it looks much better.
I think what you're seeing there is the result of rounding the coordinates to the nearest integer pixel coordinate.
To get something that looks more even, you want to use some form of anti-aliasing. The options that spring to mind are:
Enable some sort of full-screen anti-aliasing, such as MSAA. This is simple to enable (a minimal sketch follows below), but can have a significant performance cost.
Put your pattern in a texture, and tile that texture over a single quad. Texture filtering and mip maps should take care of the anti-aliasing for you, and it will probably be faster to render that way as well because you only need a single quad.
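For the first option, since the code above is Qt-based, a minimal sketch of requesting MSAA through the default surface format could look like this (call it early in main(), before the first QOpenGLWidget/QOpenGLWindow is created; the actual sample count you get depends on the driver):

// Hedged sketch: request a 4x multisampled default framebuffer in Qt.
QSurfaceFormat fmt = QSurfaceFormat::defaultFormat();
fmt.setSamples(4);                      // 4x MSAA; 0 disables multisampling
QSurfaceFormat::setDefaultFormat(fmt);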
I have a problem with a multisampled texture. It seems that after blitting it to another surface for rendering, it's flipped upside down. What might cause that? Should I provide some code?
Edit: Well, it's going to be a lot of code, but here we go. This is how I create my surfaces/textures:
protected override void Create(int width, int height, SurfaceFormat format)
{
    this.format = format;
    bool multisample = format.Multisampling > 0;
    int samples = Math.Max(0, Math.Min(format.Multisampling, 4));
    format.TextureTarget = multisample ? TextureTarget.Texture2DMultisample : format.TextureTarget;
    format.MipMapping = format.MipMapping && format.TextureTarget == TextureTarget.Texture2D;
    Width = width;
    Height = height;

    textureHandle = GL.GenTexture();
    //bind texture
    GL.BindTexture(format.TextureTarget, textureHandle);
    Log.Error("Bound Texture: " + GL.GetError());

    if (format.TextureTarget == TextureTarget.Texture2D)
    {
        GL.TexParameter(format.TextureTarget, TextureParameterName.TextureMinFilter, (int)(format.MipMapping ? TextureMinFilter.LinearMipmapLinear : TextureMinFilter.Linear));
        GL.TexParameter(format.TextureTarget, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);
        GL.TexParameter(format.TextureTarget, TextureParameterName.TextureWrapS, (int)format.WrapMode);
        GL.TexParameter(format.TextureTarget, TextureParameterName.TextureWrapT, (int)format.WrapMode);
    }
    Log.Debug("Created Texture Parameters: " + GL.GetError());

    if (samples < 1)
        GL.TexImage2D(format.TextureTarget, 0, format.InternalFormat, Width, Height, 0, format.PixelFormat, format.SourceType, format.Pixels);
    else
        GL.TexImage2DMultisample(TextureTargetMultisample.Texture2DMultisample, samples, format.InternalFormat, Width, Height, true);

    if (format.MipMapping)
        GL.GenerateMipmap(GenerateMipmapTarget.Texture2D);
    Log.Debug("Created Image: " + GL.GetError());

    //unbind texture
    GL.BindTexture(format.TextureTarget, 0);

    //create depthbuffer
    if (format.DepthBuffer)
    {
        GL.GenRenderbuffers(1, out dbHandle);
        GL.BindRenderbuffer(RenderbufferTarget.RenderbufferExt, dbHandle);
        if (multisample)
            GL.RenderbufferStorageMultisample(RenderbufferTarget.RenderbufferExt, samples, RenderbufferStorage.DepthComponent24, Width, Height);
        else
            GL.RenderbufferStorage(RenderbufferTarget.RenderbufferExt, RenderbufferStorage.DepthComponent24, Width, Height);
    }

    //create fbo
    fboHandle = GL.GenFramebuffer();
    GL.BindFramebuffer(FramebufferTarget.FramebufferExt, fboHandle);
    GL.FramebufferTexture2D(FramebufferTarget.FramebufferExt, FramebufferAttachment.ColorAttachment0Ext, format.TextureTarget, textureHandle, 0);
    if (format.DepthBuffer)
        GL.FramebufferRenderbuffer(FramebufferTarget.FramebufferExt, FramebufferAttachment.DepthAttachmentExt, RenderbufferTarget.RenderbufferExt, dbHandle);
    Log.Debug("Framebuffer status: " + GL.CheckFramebufferStatus(FramebufferTarget.FramebufferExt));
    Log.Debug("Created Framebuffer: " + GL.GetError());
    GL.BindFramebuffer(FramebufferTarget.FramebufferExt, 0);
}
creation:
var sf = SurfaceFormat.Surface2D;
sf.Multisampling = 4;
multisampler = new Surface(Window.Width, Window.Height, sf);
Now in the render loop I do the following:
//Render entire scene to multisampler
SceneRenderer.RenderMultisampled(ActiveCamera, multisampler, time);
//blit sampler to my material input texture
multisampler.CloneTo(postEffect.Textures["_tex"]);
//blit this texture to my "Canvas" (basically a surface with additional drawing methods). The canvas material is used as a texture for a quad in my scene, thus rendering a copy of the output image to a plane.
postEffect.Textures["_tex"].CloneTo(canvas.Surface);
//This would be the same but via rendering with a quad instead of blitting. Has the same result
//canvas.Clear();
//canvas.DrawMaterial(postEffect);
//clear framebuffer
GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0);
//Set viewport
GL.Viewport(0, 0, Window.Width, Window.Height);
//use material (bind shader & shader params) and draw the scene.
postEffect.Use();
Helper.DrawScreenQuad();
GL.UseProgram(0);
If this is not enough, I can also post the shaders & mesh code.
EDIT2: Okay, everything is now working as expected EXCEPT when I use canvas.draw() instead of blitting the texture. The draw method looks like this:
public void DrawMaterial(Material material)
{
    GL.Viewport(0, 0, Surface.Width, Surface.Height);
    Surface.BindFramebuffer();

    material.Use();
    Helper.DrawScreenQuad();
    GL.UseProgram(0);

    GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0);
}
Draw screen quad:
public static void DrawScreenQuad()
{
    GL.Begin(PrimitiveType.Quads);
    GL.TexCoord2(0, 1);
    GL.Vertex2(-1, -1);
    GL.TexCoord2(1, 1);
    GL.Vertex2(1, -1);
    GL.TexCoord2(1, 0);
    GL.Vertex2(1, 1);
    GL.TexCoord2(0, 0);
    GL.Vertex2(-1, 1);
    GL.End();
}
Shader used:
[Shader vertex]
#version 150 core
in vec2 _pos;
out vec2 texCoord;
uniform float _time;
uniform sampler2D tex;
void main() {
gl_Position = vec4(_pos, 0, 1);
texCoord = _pos/2+vec2(0.5,0.5);
texCoord.y = 1 - texCoord.y;
}
[Shader fragment]
#version 150 core
#define PI 3.1415926535897932384626433832795
out vec4 outColor;
uniform float _time;
uniform sampler2D tex;
in vec2 texCoord;
//
void main() {
outColor = texture2D(tex, texCoord);
}
Somehow the rendered scene gets turned upside down by this. Why?
I think I found my mistake. I had the texture coordinates AND the camera inverted. It seems to be fixed now. What I still don't understand is why this works:
[Shader vertex]
#version 150 core
in vec2 _pos;
out vec2 texCoord;
uniform float _time;
uniform sampler2D tex;
void main() {
gl_Position = vec4(_pos, 0, 1);
texCoord = _pos/2+vec2(0.5,0.5);
//texCoord.y = 1 - texCoord.y;
}
[Shader fragment]
#version 150 core
#define PI 3.1415926535897932384626433832795
out vec4 outColor;
uniform float _time;
uniform sampler2D tex;
in vec2 texCoord;
//
void main() {
outColor = texture2D(tex, texCoord);
}
I would've expected that the y coordinate of the tex coord would need to be inverted.
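As a general note (not from the original post): with FBOs, both rendering and sampling use the same bottom-left origin, so no flip is needed as long as every pass sticks to one convention; a flip usually creeps in when a blit or a fullscreen quad uses mismatched coordinates. A rough C-style GL illustration, where srcFbo, dstFbo, width and height are placeholder names (note that a multisample resolve generally requires matching rectangles, so the flipped form applies to single-sampled blits):

// Hedged illustration: glBlitFramebuffer copies a source rectangle to a
// destination rectangle; swapping the destination Y coordinates flips the image.
glBindFramebuffer(GL_READ_FRAMEBUFFER, srcFbo);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, dstFbo);

// 1:1 copy, orientation preserved
glBlitFramebuffer(0, 0, width, height, 0, 0, width, height,
                  GL_COLOR_BUFFER_BIT, GL_NEAREST);

// the same copy, but vertically flipped (destination Y0/Y1 swapped)
glBlitFramebuffer(0, 0, width, height, 0, height, width, 0,
                  GL_COLOR_BUFFER_BIT, GL_NEAREST);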
I'm using the tutorials on http://arcsynthesis.org/gltut/ to learn OpenGL; it's required, so I have to use it. Mostly I want to apply the textures from Tutorial 15 onto objects in Tutorial 7 (the world with UBO).
So far it seems like the textures only work when mipmapping is turned on. This comes with a downside: the only mipmap level used is the one with an index of zero, which is the single-colored 1x1 pixel one. I tried setting the minimum mipmap level higher or turning off mipmaps entirely, but even that doesn't fix things, because then everything turns pitch black. Now I'll list the most important parts of my program.
EDIT: I guess I'll add more details...
The vertex shader has something like this:
#version 330
layout(location = 0) in vec4 position;
layout(location = 1) in vec4 color;
layout(location = 2) in vec3 normal;
//Added these later
layout(location = 5) in vec2 texCoord;
out vec2 colorCoord;
smooth out vec4 interpColor;
out vec3 vertexNormal;
out vec3 modelSpacePosition;
out vec3 cameraSpacePosition;
uniform mat4 worldToCameraMatrix;
uniform mat4 modelToWorldMatrix;
uniform mat3 normalModelToCameraMatrix;
uniform vec3 dirToLight;
uniform vec4 lightIntensity;
uniform vec4 ambientIntensity;
uniform vec4 baseColor;
uniform mat4 cameraToClipMatrix;
void main()
{
    vertexNormal = normal;
    vec3 normCamSpace = normalize(normalModelToCameraMatrix * vertexNormal);
    cameraSpacePosition = normCamSpace;

    float cosAngIncidence = dot(normCamSpace, dirToLight);
    cosAngIncidence = clamp(cosAngIncidence, 0, 1);

    modelSpacePosition.x = position.x;
    modelSpacePosition.y = position.y;
    modelSpacePosition.z = position.z;

    vec4 temp = modelToWorldMatrix * position;
    temp = worldToCameraMatrix * temp;
    gl_Position = cameraToClipMatrix * temp;

    interpColor = ((lightIntensity * cosAngIncidence) + (ambientIntensity)) * baseColor;
    colorCoord = texCoord;
}
The fragment shader looks like this:
#version 330
in vec3 vertexNormal;
in vec3 modelSpacePosition;
smooth in vec4 interpColor;
uniform vec3 modelSpaceLightPos;
uniform vec4 lightIntensity2;
uniform vec4 ambientIntensity2;
out vec4 outputColor;
//Added later
in vec2 colorCoord;
uniform sampler2D colorTexture;
void main()
{
    vec3 lightDir2 = normalize(modelSpacePosition - modelSpaceLightPos);
    float cosAngIncidence2 = dot(normalize(vertexNormal), lightDir2);
    cosAngIncidence2 = clamp(cosAngIncidence2, 0, 1);

    float light2DistanceSqr = dot(modelSpacePosition - modelSpaceLightPos, modelSpacePosition - modelSpaceLightPos);

    //added
    vec4 texture2 = texture(colorTexture, colorCoord);

    outputColor = ((ambientIntensity2 + (interpColor*2))/4) +
        ((((interpColor) * lightIntensity2/200 * cosAngIncidence2) + (ambientIntensity2 * interpColor))
        / ((sqrt(light2DistanceSqr) + light2DistanceSqr)/200));

    //No outputColor for texture testing
    outputColor = texture2;
}
Those were both shaders. And here are the parts added to the .cpp:
#include <glimg/glimg.h>
#include "../framework/directories.h"
[...]
const int g_colorTexUnit = 0;
GLuint g_checkerTexture = 0;
And here's the loader for the texture:
void LoadCheckerTexture()
{
    try
    {
        std::string filename(LOCAL_FILE_DIR);
        filename += "checker.dds";

        std::auto_ptr<glimg::ImageSet>
            pImageSet(glimg::loaders::dds::LoadFromFile(filename.c_str()));

        glGenTextures(1, &g_checkerTexture);
        glBindTexture(GL_TEXTURE_2D, g_checkerTexture);

        glimg::SingleImage image = pImageSet->GetImage(0, 0, 0);
        glimg::Dimensions dims = image.GetDimensions();

        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, dims.width, dims.height, 0, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, image.GetImageData());

        glBindTexture(GL_TEXTURE_2D, 0);
    }
    catch(std::exception &e)
    {
        printf("%s\n", e.what());
        throw;
    }
}
Naturally I've got this in void init():
LoadCheckerTexture();
And then when rendering the object:
glActiveTexture(GL_TEXTURE0 + g_colorTexUnit);
glBindTexture(GL_TEXTURE_2D,g_checkerTexture);
g_pLeftMesh->Render();
glBindSampler(g_colorTexUnit, 0);
glBindTexture(GL_TEXTURE_2D, 0);
With all of this, everything comes out pitch black; however, when I change the outputColor equation to "texture + outputColor;", everything looks normal. I have no idea what I'm doing wrong here. A friend tried to help me; we removed some unnecessary stuff, but we got nothing running.
OK guys, I've worked on this whole thing and did manage to somehow get it running. First off, I had to add samplers:
GLuint g_samplers;
//Add Later
void CreateSamplers()
{
    glGenSamplers(1, &g_samplers);
    glSamplerParameteri(g_samplers, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glSamplerParameteri(g_samplers, GL_TEXTURE_WRAP_T, GL_REPEAT);

    //Linear mipmap Nearest
    glSamplerParameteri(g_samplers, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glSamplerParameteri(g_samplers, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
}
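A side note that is not from the original post: the likely reason a sampler with GL_NEAREST helps is that the texture was uploaded with only mipmap level 0, while the default minification filter expects a full mipmap chain; such a texture is incomplete and samples as black. An alternative sketch, assuming you keep g_checkerTexture and don't want a separate sampler object, would be to set the texture's own filters right after uploading it:

glBindTexture(GL_TEXTURE_2D, g_checkerTexture);
// a min filter that does not need mipmaps, so a texture with only level 0 is complete
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, 0);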
I also added this to the texture loading code:
glimg::OpenGLPixelTransferParams xfer = glimg::GetUploadFormatType(pImageSet->GetFormat(), 0);
glimg::SingleImage image = pImageSet->GetImage(0, 0, 0);
glimg::Dimensions dims = image.GetDimensions();
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, dims.width, dims.height, 0,
xfer.format, xfer.type, image.GetImageData());
The xfer variable does get the format and type adjusted to match the DDS file.
Also the render code got turned into this:
//Added necessary
glActiveTexture(GL_TEXTURE0 + g_colorTexUnit);
glBindTexture(GL_TEXTURE_2D,g_checkerTexture);
glBindSampler(g_colorTexUnit, g_samplers);
g_pLeftMesh->Render();
glBindSampler(g_colorTexUnit, 0);
glBindTexture(GL_TEXTURE_2D, 0);
And of course at the end of init() I needed to add the CreateSamplers thing:
//Added this later
LoadCheckerTexture();
CreateSamplers();
I'm sorry for all the trouble with this, but I guess OpenGL really is just this confusing, and it was partly dumb luck that I got it right. I'm just posting this so that people know.
Your failure to apply textures may be caused by one of the following:
Have you added texture coordinates to your objects? (This is the most probable cause, because you are adding textures to a non-textured tutorial.) Add the texture coordinates to the VAO.
Did you add a uniform texture unit (sampler2D)? (It must be a uniform, otherwise texturing will not work properly.)
Is your texture loaded, bound and enabled (GL_TEXTURE_2D)?
Is your active texture unit 0? If not, change the layout/multitexture coordinates or set the active texture unit to 0. (A minimal binding sketch follows this list.)
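For the binding-related points, a minimal CPU-side sketch might look like this (prog and tex are placeholder names here; "colorTexture" matches the sampler uniform in the fragment shader above):

glUseProgram(prog);                                          // your shader program
glActiveTexture(GL_TEXTURE0);                                // texture unit 0
glBindTexture(GL_TEXTURE_2D, tex);                           // the loaded texture
glUniform1i(glGetUniformLocation(prog, "colorTexture"), 0);  // point the sampler2D at unit 0
// ... draw the mesh; its VAO must also carry the texture coordinates ...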
These two are simple texturing shaders (texture unit 0) with no special features (like lighting, blending, bump mapping, ...):
tm_l2g is the transformation from local object space to world space (modelview)
tm_g2s is the transformation from world space to screen space (projection)
pos are vertex coordinates
txr are texture coordinates
col are colors
Do not forget to change the uniform names and layout locations to match yours.
Vertex:
//------------------------------------------------------------------
#version 420 core
//------------------------------------------------------------------
uniform mat4x4 tm_l2g;
uniform mat4x4 tm_g2s;
layout(location=0) in vec3 pos;
layout(location=1) in vec4 col;
layout(location=2) in vec2 txr;
out smooth vec4 pixel_col;
out smooth vec2 pixel_txr;
//------------------------------------------------------------------
void main(void)
{
    vec4 p;
    p.xyz=pos;
    p.w=1.0;
    p=tm_l2g*p;
    p=tm_g2s*p;
    gl_Position=p;
    pixel_col=col;
    pixel_txr=txr;
}
//------------------------------------------------------------------
Fragment:
//------------------------------------------------------------------
#version 420 core
//------------------------------------------------------------------
in smooth vec4 pixel_col;
in smooth vec2 pixel_txr;
uniform sampler2D txr_texture0;
out layout(location=0) vec4 frag_col;
//------------------------------------------------------------------
void main(void)
{
vec4 col;
col=texture(txr_texture0,pixel_txr.st);
frag_col=col*pixel_col;
}
//------------------------------------------------------------------
[edit1] CPU-side old-style OpenGL render code (initializations are not included, this is only the render code; they can be found here)
//------------------------------------------------------------------
// set modelview,projection,textures,bind GLSL programs...
GLfloat a=10.0,z=0.0;
glColor3f(1.0,1.0,1.0);
glBegin(GL_QUADS);
// textured quad
glTexCoord2f(0.0,0.0); glVertex3f(-a,-a,z);
glTexCoord2f(0.0,1.0); glVertex3f(-a,+a,z);
glTexCoord2f(1.0,1.0); glVertex3f(+a,+a,z);
glTexCoord2f(1.0,0.0); glVertex3f(+a,-a,z);
// reverse order quad to be sure that at least one passes the CULL_FACE test
glTexCoord2f(1.0,0.0); glVertex3f(+a,-a,z);
glTexCoord2f(1.0,1.0); glVertex3f(+a,+a,z);
glTexCoord2f(0.0,1.0); glVertex3f(-a,+a,z);
glTexCoord2f(0.0,0.0); glVertex3f(-a,-a,z);
glEnd();
//------------------------------------------------------------------
[edit2] OK, here goes the VAO/VBO render code...
//------------------------------------------------------------------------------
// enum of VBO locations (it is also your layout location); I use enums for simple in-code changes
enum _vbo_enum
{
    _vbo_pos=0,     // glVertex
    _vbo_col,       // glColor
    _vbo_tan,       // glNormal
    _vbo_unused0,   // unused (at least I don't see anything at this location in your code)
    _vbo_unused1,   // unused (at least I don't see anything at this location in your code)
    _vbo_txr,       // glTexCoord
    _vbos
};
//------------------------------------------------------------------------------
// 'global' names and size for OpenGL mesh in VAO/VBO ... similar to texture names/handles
GLuint vao[1],vbo[_vbos],num_pnt=0;
//------------------------------------------------------------------------------
void VAO_init_cube() // call this before VAO use,...but after OpenGL init !
{
//[1] first you need some model to render (mesh), here is a simple cube
// size,position of cube - change it that it is visible in your scene
const GLfloat a=1.0,x=0.0,y=0.0,z=0.0;
// cube points 3f x,y,z
GLfloat mesh_pos[]=
{
x-a,y-a,z-a,x-a,y+a,z-a,x+a,y+a,z-a,x+a,y-a,z-a,
x-a,y-a,z+a,x-a,y+a,z+a,x+a,y+a,z+a,x+a,y-a,z+a,
x-a,y-a,z-a,x-a,y-a,z+a,x+a,y-a,z+a,x+a,y-a,z-a,
x-a,y+a,z-a,x-a,y+a,z+a,x+a,y+a,z+a,x+a,y+a,z-a,
x-a,y-a,z-a,x-a,y+a,z-a,x-a,y+a,z+a,x-a,y-a,z+a,
x+a,y-a,z-a,x+a,y+a,z-a,x+a,y+a,z+a,x+a,y-a,z+a,
};
// cube colors 3f r,g,b
GLfloat mesh_col[]=
{
0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,
0.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,
0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,
0.0,1.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,
1.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,1.0,
};
// cube normals 3f x,y,z
GLfloat mesh_tan[]=
{
-0.6,-0.6,-0.6,-0.6,+0.6,-0.6,+0.6,+0.6,-0.6,+0.6,-0.6,-0.6,
-0.6,-0.6,+0.6,-0.6,+0.6,+0.6,+0.6,+0.6,+0.6,+0.6,-0.6,+0.6,
-0.6,-0.6,-0.6,-0.6,-0.6,+0.6,+0.6,-0.6,+0.6,+0.6,-0.6,-0.6,
-0.6,+0.6,-0.6,-0.6,+0.6,+0.6,+0.6,+0.6,+0.6,+0.6,+0.6,-0.6,
-0.6,-0.6,-0.6,-0.6,+0.6,-0.6,-0.6,+0.6,+0.6,-0.6,-0.6,+0.6,
+0.6,-0.6,-0.6,+0.6,+0.6,-0.6,+0.6,+0.6,+0.6,+0.6,-0.6,+0.6,
};
// cube texture coords 2f s,t
GLfloat mesh_txr[]=
{
0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,
};
// init VAO/VBO
glGenVertexArrays(1,vao); // allocate 1 x VAO
glGenBuffers(_vbos,vbo); // allocate _vbos x VBO
// copy mesh to VAO/VBO ... after this you do not need the mesh anymore
GLint i,sz,n; // n = number of numbers per 1 entry
glBindVertexArray(vao[0]);
num_pnt=sizeof(mesh_pos)/(sizeof(GLfloat)*3); // num of all points in mesh
i=_vbo_pos; n=3; sz=sizeof(GLfloat)*n;
glBindBuffer(GL_ARRAY_BUFFER,vbo[i]);
glBufferData(GL_ARRAY_BUFFER,sz*num_pnt,mesh_pos,GL_STATIC_DRAW);
glEnableVertexAttribArray(i);
glVertexAttribPointer(i,n,GL_FLOAT,GL_FALSE,0,0);
i=_vbo_col; n=3; sz=sizeof(GLfloat)*n;
glBindBuffer(GL_ARRAY_BUFFER,vbo[i]);
glBufferData(GL_ARRAY_BUFFER,sz*num_pnt,mesh_col,GL_STATIC_DRAW);
glEnableVertexAttribArray(i);
glVertexAttribPointer(i,n,GL_FLOAT,GL_FALSE,0,0);
i=_vbo_tan; n=3; sz=sizeof(GLfloat)*n;
glBindBuffer(GL_ARRAY_BUFFER,vbo[i]);
glBufferData(GL_ARRAY_BUFFER,sz*num_pnt,mesh_tan,GL_STATIC_DRAW);
glEnableVertexAttribArray(i);
glVertexAttribPointer(i,n,GL_FLOAT,GL_FALSE,0,0);
i=_vbo_txr; n=2; sz=sizeof(GLfloat)*n;
glBindBuffer(GL_ARRAY_BUFFER,vbo[i]);
glBufferData(GL_ARRAY_BUFFER,sz*num_pnt,mesh_txr,GL_STATIC_DRAW);
glEnableVertexAttribArray(i);
glVertexAttribPointer(i,n,GL_FLOAT,GL_FALSE,0,0);
glBindVertexArray(0);
}
//------------------------------------------------------------------------------
void VAO_draw() // call this to draw your mesh,... need to enable and bind textures,... before use
{
glDisable(GL_CULL_FACE);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);
glBindVertexArray(vao[0]);
glEnableVertexAttribArray(_vbo_pos);
glEnableVertexAttribArray(_vbo_col);
glEnableVertexAttribArray(_vbo_tan);
glDisableVertexAttribArray(_vbo_unused0);
glEnableVertexAttribArray(_vbo_txr);
glDrawArrays(GL_QUADS,0,num_pnt);
glDisableVertexAttribArray(_vbo_pos);
glDisableVertexAttribArray(_vbo_col);
glDisableVertexAttribArray(_vbo_tan);
glDisableVertexAttribArray(_vbo_unused0);
glDisableVertexAttribArray(_vbo_unused1);
glDisableVertexAttribArray(_vbo_txr);
glBindVertexArray(0);
}
//------------------------------------------------------------------------------
void VAO_exit() // clean up ... call this when you do not need VAO/VBO anymore
{
glDisableVertexAttribArray(_vbo_pos);
glDisableVertexAttribArray(_vbo_col);
glDisableVertexAttribArray(_vbo_tan);
glDisableVertexAttribArray(_vbo_unused0);
glDisableVertexAttribArray(_vbo_unused1);
glDisableVertexAttribArray(_vbo_txr);
glBindVertexArray(0);
glDeleteVertexArrays(1,vao);
glDeleteBuffers(_vbos,vbo);
}
//------------------------------------------------------------------------------
[edit3] If you are a Win32/64 user, you can try my IDE for GLSL.
It is very simple and easy to use, but you cannot change texture/attrib locations. Press [F1] for help, [F9] to run, and [F10] to return to normal OpenGL mode. Also, the text editor is a little buggy sometimes, but it is enough for my purposes.
GLSL IDE