Why, in this program, do the four points drawn with glVertex3f render at the expected positions, while the four points in the vertex array render as only a single point in the center of the window?
#include <GL/glew.h>
#include <GL/glut.h>
#include <iostream>
#include <vector>
using namespace std;
struct point3 {
GLfloat x, y, z;
};
vector<point3> Vertices(4);
void display(void)
{
glClear (GL_COLOR_BUFFER_BIT);
GLuint abuffer;
glGenVertexArrays(1, &abuffer);
glBindVertexArray(abuffer);
GLuint buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
Vertices = {{0.3f, 0.3f, 0.5f}, {0.6f, 0.6f, 0.5f}, {0.6f, 0.3f, 0.5f}, {0.3f, 0.6f, 0.5f}};
glBufferData(GL_ARRAY_BUFFER, Vertices.size() * sizeof(Vertices[0]), &Vertices[0], GL_STATIC_DRAW);
glVertexPointer(3, GL_FLOAT, 0, &Vertices[0]);
glDrawArrays(GL_POINTS, 0, Vertices.size());
glFlush();
glBegin(GL_POINTS);
glVertex3f(0.25f, 0.25f, 0.5f);
glVertex3f(0.25f, 0.75f, 0.5f);
glVertex3f(0.75f, 0.75f, 0.5f);
glVertex3f(0.75f, 0.25f, 0.5f);
glEnd();
glFlush();
}
void init (void)
{
glClearColor (0.0, 0.0, 0.0, 0.0);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0, 1.0, 0.0, 1.0, -1.0, 1.0);
}
int main(int argc, char **argv)
{
glutInit(&argc, argv);
glutInitDisplayMode (GLUT_SINGLE | GLUT_RGB);
glutInitWindowSize(500, 500);
glutInitWindowPosition(0, 0);
glutCreateWindow("simple OpenGL example");
glutDisplayFunc(display);
GLenum err = glewInit();
if (GLEW_OK != err) {
cerr << "Error: " << glewGetErrorString(err) << endl;
}
cout << "Status: Using GLEW" << glewGetString(GLEW_VERSION) << endl;
init();
glutMainLoop();
}
You haven't enabled the attribute, so the array is never actually used. You do that with:
(OpenGL 2) glEnableClientState(GL_VERTEX_ARRAY) (or GL_COLOR_ARRAY, etc.)
(OpenGL 3) glEnableVertexAttribArray(attributeIndex)
Without that, whatever you point to with glVertexPointer (GL2) or glVertexAttribPointer (GL3) is simply ignored.
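Applied to the program above, that means adding glEnableClientState(GL_VERTEX_ARRAY) before the draw call. One further note: because a VBO is bound to GL_ARRAY_BUFFER, the last argument of glVertexPointer is interpreted as a byte offset into that buffer, so passing &Vertices[0] (a client-memory pointer) is also wrong. A minimal sketch of a corrected display() under those assumptions (the unused per-frame VAO is dropped, and buffer creation would normally live in init()):
void display(void)
{
    glClear(GL_COLOR_BUFFER_BIT);

    Vertices = {{0.3f, 0.3f, 0.5f}, {0.6f, 0.6f, 0.5f},
                {0.6f, 0.3f, 0.5f}, {0.3f, 0.6f, 0.5f}};

    GLuint buffer;
    glGenBuffers(1, &buffer);
    glBindBuffer(GL_ARRAY_BUFFER, buffer);
    glBufferData(GL_ARRAY_BUFFER, Vertices.size() * sizeof(Vertices[0]),
                 Vertices.data(), GL_STATIC_DRAW);

    glEnableClientState(GL_VERTEX_ARRAY);   // the missing call
    // With a buffer bound, the "pointer" argument is an offset into it.
    glVertexPointer(3, GL_FLOAT, 0, (void*)0);
    glDrawArrays(GL_POINTS, 0, (GLsizei)Vertices.size());
    glDisableClientState(GL_VERTEX_ARRAY);

    glFlush();
}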
So I am using GLEW & SDL2. At the moment I'm having trouble drawing a simple rectangle.
I've done this before and got it to work with no problem. (I have checked my old code, but could not find any significant differences.)
Here's what the output should look like, aside from a different background color.
However, this is what the output actually looks like:
When I check the output with RenderDoc, all the values seem to be as they should be, but for some reason the output rectangle is significantly larger than the viewport. (Assuming the white border in the output window represents the viewport.)
I've googled around a bit, but could not find any solution to this issue. I figure I'm just missing a single function call somewhere, which is causing the program to output things in a weird way.
Here's the relevant code
game.cpp
#include "Game.h"
void Game::Game::main_loop()
{
//Placeholder setup;
float vertices[] = {
-0.5f, -0.5f, 0.0f,
0.5f, -0.5f, 0.0f,
0.5f, 0.5f, 0.0f,
-0.5f, 0.5f, 0.0f,
};
unsigned int indices[] = {
0, 1, 2,
2, 3, 0
};
unsigned int vertex_array, vertex_buffer, element_buffer;
glGenVertexArrays(1, &vertex_array);
glBindVertexArray(vertex_array);
glGenBuffers(1, &vertex_buffer);
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glGenBuffers(1, &element_buffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, element_buffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, false, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
Asset::Loader l;
auto s = l.load_shader("res/GUI/Shaders/Textured_quad.vert", "res/GUI/Shaders/Textured_quad.frag");
keep_going = true;
while(keep_going)
{
handle_events();
renderer.clear();
s->use(); //Just calls 'glUseProgram' on the shader inside this object
glBindVertexArray(vertex_array);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
renderer.show();
}
}
void Game::Game::handle_events()
{
while(SDL_PollEvent(&event))
{
switch(event.type)
{
case SDL_MOUSEBUTTONDOWN:
case SDL_QUIT:
keep_going = false;
}
}
}
Renderer.cpp
#include "Renderer.h"
Game::Graphics::Renderer::Renderer()
{
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
window = SDL_CreateWindow(
"GUI",
SDL_WINDOWPOS_CENTERED_DISPLAY(1),
SDL_WINDOWPOS_CENTERED_DISPLAY(1),
640,
480,
SDL_WINDOW_OPENGL
);
context = SDL_GL_CreateContext(
window
);
if(!window)
{
std::cerr << "Window Error: " << SDL_GetError() << std::endl;
return;
}
if(!context)
{
std::cerr << "GL Context Error: " << SDL_GetError() << std::endl;
return;
}
glewInit();
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glViewport(0, 0, 640, 480);
glEnable(GL_DEPTH_TEST);
glEnable(GL_TEXTURE_2D);
glEnable(GL_LIGHTING);
glDisable(GL_BLEND);
}
Game::Graphics::Renderer::~Renderer()
{
SDL_HideWindow(window);
SDL_GL_DeleteContext(context);
SDL_DestroyWindow(window);
}
void Game::Graphics::Renderer::clear()
{
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
}
void Game::Graphics::Renderer::show()
{
SDL_GL_SwapWindow(window);
}
Shader
#version 330 core
layout (location = 0) in vec3 a_pos;
void main()
{
gl_Position = vec4(a_pos, 0.0f);
}
//fragment
#version 330 core
out vec4 o_color;
void main()
{
o_color = vec4(1.0f, 0.0f, 0.0f, 1.0f);
}
gl_Position = vec4(a_pos, 0.0f);
The X/Y/Z coordinates in gl_Position get divided by the W coordinate to get the screen coordinates. You are setting the W coordinate to 0, which means the X and Y coordinates become infinity and -infinity (1/0 and -1/0) and the Z coordinate becomes NaN.
Change it to:
gl_Position = vec4(a_pos, 1.0f);
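For reference, the corrected vertex shader in full:
#version 330 core
layout (location = 0) in vec3 a_pos;
void main()
{
    // Clip-space x/y/z are divided by w during rasterization;
    // w = 1.0 leaves untransformed coordinates unchanged.
    gl_Position = vec4(a_pos, 1.0f);
}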
I draw patterns that have fine pixel-level detail, and they often exhibit a Moiré effect. I am not good at shading and I am not sure whether this problem can be solved with shaders. I have not found any basic, understandable, and complete example of shaders; most tutorial websites start a program from the middle, omitting the header file includes!
This is an MWE of my code. Is it possible to mitigate or remove the Moiré effect from it?
#include <cmath>
#include <vector>
#ifdef __APPLE__
#include <GLUT/glut.h>
#else
#include <GL/glut.h>
#endif
const int w=640,h=480;
float cam_angle=0.0f;
void init()
{
GLfloat LightAmbient[] = { 0.2f, 0.2f, 0.2f, 1.0f };
GLfloat LightDiffuse[] = { 0.5f, 0.5f, 0.5f, 1.0f };
GLfloat LightPosition[] = { 5.0f, 5.0f, -10.0f, 1.0f };
glClearColor(0.0, 0.0, 0.0, 0.0);
glShadeModel(GL_SMOOTH);
glEnable(GL_DEPTH_TEST);
glLightfv(GL_LIGHT1, GL_AMBIENT, LightAmbient);
glLightfv(GL_LIGHT1, GL_DIFFUSE, LightDiffuse);
glLightfv(GL_LIGHT1, GL_POSITION, LightPosition);
glEnable(GL_LIGHT1);
}
void onDisplay()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective (90, float(w)/float(h), 0.01, 10000);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
const double pi=3.1415926;
double cam_x=2.0*cos(pi/4.0)*cos(cam_angle);
double cam_y=2.0*sin(pi/4.0)*cos(cam_angle);
double cam_z=2.0*sin(cam_angle);
gluLookAt(cam_x, cam_y, cam_z, 0, 0, 0, 0, 0, 1);
struct Point3D
{
double x, y, z;
unsigned char r, g, b, a=255;
};
for(double r=0.5;r<=1.0;r+=0.03)
{
std::vector<Point3D> points;
for(int i=0;i<1000;i++)
{
double theta=double(i)/1000.0*pi*2.0;
Point3D p;
p.x=r*sin(theta);
p.y=r*cos(theta);
p.z=r;
p.r=128;
p.g=200;
p.b=50;
points.push_back(p);
}
// draw
glPushMatrix();
glColor3ub(255,255,255);
glEnableClientState( GL_VERTEX_ARRAY );
glEnableClientState( GL_COLOR_ARRAY );
glVertexPointer(3, GL_DOUBLE, sizeof(Point3D), &points[0].x );
glColorPointer( 4, GL_UNSIGNED_BYTE, sizeof(Point3D), &points[0].r );
// glPointSize( 3.0 );
glLineWidth(2.0);
glDrawArrays( GL_LINE_STRIP, 0, int(points.size()) );
glDisableClientState( GL_VERTEX_ARRAY );
glDisableClientState( GL_COLOR_ARRAY );
glPopMatrix();
}
glFlush();
glutSwapBuffers();
}
void Timer(int /*value*/)
{
cam_angle+=0.01f;
glutPostRedisplay();
// 100 milliseconds
glutTimerFunc(100, Timer, 0);
}
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitWindowSize (w, h);
glutInitDisplayMode(GLUT_RGBA | GLUT_DEPTH | GLUT_DOUBLE);
glutInitWindowPosition (100, 100);
glutCreateWindow ("my window");
glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
init();
glutDisplayFunc(onDisplay);
Timer(0);
glutMainLoop();
return 0;
}
If you use Multisampling, then the result will be significantly improved:
glutSetOption(GLUT_MULTISAMPLE, 8);
glutInitDisplayMode(GLUT_RGBA | GLUT_DEPTH | GLUT_DOUBLE | GLUT_MULTISAMPLE);
without GLUT_MULTISAMPLE:
with GLUT_MULTISAMPLE:
See also the answer to GLUT + OpenGL multisampling anti aliasing (MSAA)
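Applied to the MWE above, only main() changes. A sketch under the assumption that you are running freeglut (glutSetOption is a freeglut extension, declared in GL/freeglut.h):
int main(int argc, char** argv)
{
    glutInit(&argc, argv);
    glutInitWindowSize(w, h);
    glutSetOption(GLUT_MULTISAMPLE, 8);   // request 8 samples (freeglut extension)
    glutInitDisplayMode(GLUT_RGBA | GLUT_DEPTH | GLUT_DOUBLE | GLUT_MULTISAMPLE);
    glutInitWindowPosition(100, 100);
    glutCreateWindow("my window");
    glEnable(GL_MULTISAMPLE);             // usually on by default, but explicit is safer
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glEnable(GL_BLEND);
    init();
    glutDisplayFunc(onDisplay);
    Timer(0);
    glutMainLoop();
    return 0;
}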
I am able to create a window and clear it to the desired color, but I am not able to draw a square in the lower left-hand corner.
#include <iostream>
#define GLEW_STATIC
#include <GL/glew.h>
#include <GLFW/glfw3.h>
const GLint WIDTH = 720;
const GLint HEIGHT = 480;
int main()
{
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3); // Attempts to set to opengl 3.3
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_RESIZABLE, GL_TRUE);
GLFWwindow *window = glfwCreateWindow(WIDTH, HEIGHT, "Learn OpenGL", nullptr, nullptr);
if (nullptr == window)
{
std::cout << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return EXIT_FAILURE;
}
int screenWidth, screenHeight;
glfwGetFramebufferSize(window, &screenWidth, &screenHeight);
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
if (GLEW_OK != glewInit())
{
std::cout << "Failed to initialise GLEW" << std::endl;
return EXIT_FAILURE;
}
glViewport(0, 0, screenWidth, screenHeight);
while (!glfwWindowShouldClose(window))
{
glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glColor3f(0.0f, 0.0f, 1.0f);
glOrtho(0.0, 1.0, 0.0, 1.0, -1.0, 1.0);
glBegin(GL_POLYGON);
glVertex3f(0.0, 0.0, 0.0);
glVertex3f(0.5, 0.0, 0.0);
glVertex3f(0.5, 0.5, 0.0);
glVertex3f(0.0, 0.5, 0.0);
glEnd();
glFlush();
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
return EXIT_SUCCESS;
}
Deprecated fixed-function functionality (glBegin() et al & the matrix stacks) doesn't work in a Core context (GLFW_OPENGL_CORE_PROFILE).
Switch to a Compatibility context:
#include <iostream>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
const GLint WIDTH = 720;
const GLint HEIGHT = 480;
int main()
{
glfwInit();
glfwWindowHint(GLFW_RESIZABLE, GL_TRUE);
GLFWwindow *window = glfwCreateWindow(WIDTH, HEIGHT, "Learn OpenGL", nullptr, nullptr);
if (nullptr == window)  // check before using the window below
{
std::cout << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return EXIT_FAILURE;
}
int screenWidth, screenHeight;
glfwGetFramebufferSize(window, &screenWidth, &screenHeight);
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
if (GLEW_OK != glewInit())
{
std::cout << "Failed to initialise GLEW" << std::endl;
return EXIT_FAILURE;
}
glViewport(0, 0, screenWidth, screenHeight);
while (!glfwWindowShouldClose(window))
{
glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
glOrtho(0.0, 1.0, 0.0, 1.0, -1.0, 1.0);
glMatrixMode( GL_MODELVIEW );
glLoadIdentity();
glColor3f(0.0f, 0.0f, 1.0f);
glBegin(GL_POLYGON);
glVertex3f(0.0, 0.0, 0.0);
glVertex3f(0.5, 0.0, 0.0);
glVertex3f(0.5, 0.5, 0.0);
glVertex3f(0.0, 0.5, 0.0);
glEnd();
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
return EXIT_SUCCESS;
}
Or supply some shaders & use a VAO & VBO to upload your geometry.
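A minimal sketch of that core-profile route, reusing the GLFW/GLEW setup from the question (compile/link status checks omitted; the square is expressed directly in normalized device coordinates, where the glOrtho(0,1,0,1,...) square from (0,0) to (0.5,0.5) maps to (-1,-1)..(0,0)):
const char* vsSrc =
    "#version 330 core\n"
    "layout (location = 0) in vec2 a_pos;\n"
    "void main() { gl_Position = vec4(a_pos, 0.0, 1.0); }\n";
const char* fsSrc =
    "#version 330 core\n"
    "out vec4 o_color;\n"
    "void main() { o_color = vec4(0.0, 0.0, 1.0, 1.0); }\n";   // blue

// Once, before the render loop:
GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, &vsSrc, nullptr);
glCompileShader(vs);
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, &fsSrc, nullptr);
glCompileShader(fs);
GLuint prog = glCreateProgram();
glAttachShader(prog, vs);
glAttachShader(prog, fs);
glLinkProgram(prog);

// Lower-left quarter of the screen as a fan of two triangles.
float quad[] = { -1.0f, -1.0f,  0.0f, -1.0f,  0.0f, 0.0f,  -1.0f, 0.0f };
GLuint vao, vbo;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(quad), quad, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);

// Each frame, instead of the glBegin()/glEnd() block:
glUseProgram(prog);
glBindVertexArray(vao);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);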
I am having trouble rendering a simple triangle. The code below compiles and runs, except there isn't any triangle; only a black background.
GLuint VBO;
static void RenderSceneCB()
{
//glClear sets the bitplane area of the window to values previously selected by glClearColor, glClearDepth, and glClearStencil.
glClear(GL_COLOR_BUFFER_BIT);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
//swaps the buffers of the current window if double buffered.
glutSwapBuffers();
}
static void InitializeGlutCallbacks()
{
glutDisplayFunc(RenderSceneCB);
}
static void CreateVertexBuffer()
{
Vector3f Vertices[3];
Vertices[0] = Vector3f(-1.0f, -1.0f, 0.0f);
Vertices[1] = Vector3f(1.0f, -1.0f, 0.0f);
Vertices[2] = Vector3f(0.0f, 1.0f, 0.0f);
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(Vertices), Vertices, GL_STATIC_DRAW);
}
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE|GLUT_RGBA);
glutInitWindowSize(1024, 768);
glutInitWindowPosition(100, 100);
glutCreateWindow("Tutorial 02");
InitializeGlutCallbacks();
// Must be done after glut is initialized!
GLenum res = glewInit();
if (res != GLEW_OK) {
fprintf(stderr, "Error: '%s'\n", glewGetErrorString(res));
return 1;
}
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
CreateVertexBuffer();
glutMainLoop();
return 0;
}
Since you do not have a shader, OpenGL does not know how to interpret your vertex attribute 0: it only knows about the built-in position, color, etc. arrays, not about generic attributes. Note that this may work on some GPUs, because they map the generic attributes to the same "slots" and zero is then interpreted as position (NVIDIA in particular is typically less strict about these issues).
You can use MiniShader, just put the following code after CreateVertexBuffer();:
minish::InitializeGLExts(); // initialize shading extensions
const char *vs =
"layout(location = 0) in vec3 pos;\n"
// the layout specifier binds this variable to vertex attribute 0
"void main()\n"
"{\n"
" gl_Position = gl_ModelViewProjectionMatrix * vec4(pos, 1.0);\n"
"}\n";
const char *fs = "void main() { gl_FragColor=vec4(.0, 1.0, .0, 1.0); }"; // green
minish::CompileShader(vs, fs);
Note that I wrote that from the top of my head; if there are any errors, please comment.
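If you would rather not add a dependency, the raw GL calls for the same thing are short. A sketch, assuming a compatibility context so that gl_ModelViewProjectionMatrix is still available (compile/link status checks omitted):
static GLuint BuildProgram()
{
    // #version 120 has no layout() qualifiers, so the attribute is bound
    // to index 0 explicitly with glBindAttribLocation before linking.
    const char* vs =
        "#version 120\n"
        "attribute vec3 pos;\n"
        "void main() { gl_Position = gl_ModelViewProjectionMatrix * vec4(pos, 1.0); }\n";
    const char* fs =
        "#version 120\n"
        "void main() { gl_FragColor = vec4(0.0, 1.0, 0.0, 1.0); }\n";   // green

    GLuint v = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(v, 1, &vs, NULL);
    glCompileShader(v);
    GLuint f = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(f, 1, &fs, NULL);
    glCompileShader(f);

    GLuint prog = glCreateProgram();
    glAttachShader(prog, v);
    glAttachShader(prog, f);
    glBindAttribLocation(prog, 0, "pos");   // matches glVertexAttribPointer(0, ...)
    glLinkProgram(prog);
    return prog;
}
Then call glUseProgram(BuildProgram()); once after CreateVertexBuffer().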
The goal of my program is to display a simple rotating triangle using a VBO.
The program fails (segmentation fault) at the glDrawArrays call.
Here's the relevant part of my C++ code:
#include <iostream>
#include <SDL/SDL.h>
#include <SDL/SDL_image.h>
#include <gl/glew.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#define OFFSET_BUFFER(offset) ((char*)NULL + (offset))
GLfloat angle = 0.0f;
const static int WIDTH = 500;
const static int HEIGHT = 500;
struct Vertex
{
float x, y, z;
float nx, ny, nz;
float s0, t0;
};
Vertex vertices[3] =
{
    {  1.000000f, -1.000000f, -1.000000f,   // position
       0.000000f, -1.000000f,  0.000000f,   // normal
       0.626059f,  0.734295f },             // texture coordinates
    {  1.000000f, -1.000000f,  1.000000f,
       0.000000f, -1.000000f,  0.000000f,
       0.626059f,  0.512602f },
    { -1.000000f, -1.000000f,  1.000000f,
       0.000000f, -1.000000f,  0.000000f,
       0.404365f,  0.512602f },
};
[...]
static GLuint applyVBO(void)
{
GLuint vboID;
glGenBuffers(1, &vboID);
glBindBuffer(GL_VERTEX_ARRAY, vboID);
glBufferData(GL_VERTEX_ARRAY, 3 * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
glBindBuffer(GL_VERTEX_ARRAY, NULL);
return (vboID);
}
int main(int ac, char **av)
{
bool continuer = true;
SDL_Event event;
SDL_Init(SDL_INIT_VIDEO);
SDL_WM_SetCaption("VBO tests",NULL);
SDL_SetVideoMode(WIDTH, HEIGHT, 32, SDL_OPENGL);
if (glewInit() != GLEW_OK)
return (-1);
glEnable(GL_DEPTH_TEST);
glEnable(GL_LIGHTING);
glEnable(GL_LIGHT0);
//glEnable(GL_NORMALIZE);
//Initialize projection
glViewport(0, 0, WIDTH, HEIGHT);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(45.0, (float)WIDTH/(float)HEIGHT, 1.0f, 100.0f);
gluLookAt(0.0f, 0.0f, 8.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f);
//Initialize VBO
GLuint vboID = applyVBO();
//Initialize uv-mapping
GLuint texID = applyTexture("Box.bmp");
//Initialize camera
//Main loop
while (continuer)
{
eventListener(&event, &continuer);
glClearDepth(1.0f);
glClearColor(0.13f, 0.12f, 0.13f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glPushMatrix();
glRotatef(angle, 1.0f, 1.0f, 1.0f);
glBindBuffer(GL_VERTEX_ARRAY, vboID);
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3, GL_FLOAT, sizeof(Vertex), OFFSET_BUFFER(0));
glEnableClientState(GL_NORMAL_ARRAY);
glNormalPointer(GL_FLOAT, sizeof(Vertex), OFFSET_BUFFER(12));
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glBindTexture(GL_TEXTURE_2D, texID);
glTexCoordPointer(2, GL_FLOAT, sizeof(Vertex), OFFSET_BUFFER(24));
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindTexture(GL_TEXTURE_2D, NULL);
glBindBuffer(GL_VERTEX_ARRAY, NULL);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
angle += 0.50f;
glPopMatrix();
glFlush();
SDL_GL_SwapBuffers();
}
SDL_Quit();
return (0);
}
Can anyone help me?
The problem is that GL_VERTEX_ARRAY is not the right enum here: it is a client-state flag for glEnableClientState, not a buffer-binding target. Every glBindBuffer and glBufferData call has to use GL_ARRAY_BUFFER instead. As written, the glBindBuffer calls fail with GL_INVALID_ENUM, so no buffer is ever bound; the gl*Pointer offsets are then treated as raw client-memory addresses, which is why glDrawArrays segfaults.
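A sketch of the corrected applyVBO (the glBindBuffer calls in the main loop need the same replacement):
static GLuint applyVBO(void)
{
    GLuint vboID;
    glGenBuffers(1, &vboID);
    // GL_ARRAY_BUFFER is the buffer-binding target; GL_VERTEX_ARRAY is a
    // client-state enum for glEnableClientState and is invalid here.
    glBindBuffer(GL_ARRAY_BUFFER, vboID);
    glBufferData(GL_ARRAY_BUFFER, 3 * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    return vboID;
}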