The function glMultiDrawElements does not work correctly - OpenGL

I want to render a simple cube using the function 'glMultiDrawElements' (even though there is just one object!), but the display is wrong. However, if I use the function 'glDrawElements', everything works. I should point out that 'glewInit' has been called successfully.
Here's the code using 'glDrawElements' :
[...]
std::vector<std::vector<int32_t>> indexArray = pRenderBatch->GetIndexAttribArray();
std::vector<int32_t> countArray = pRenderBatch->GetCountElementArray(indexArray);
// indexArray[0] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 0, 2, 19, 3, 5, 20, 21, 22, 10, 23, 11, 13, 24, 14, 25, 15, 17}
//Size(indexArray) = 1, Size(indexArray[0])=36
// countArray = {36} //Size(countArray)=1
glDrawElements(GL_TRIANGLES, countArray[0], GL_UNSIGNED_INT, (const GLvoid *)&indexArray[0][0]);
[...]
Now, here's the code using 'glMultiDrawElements' :
[...]
std::vector<std::vector<int32_t>> indexArray = pRenderBatch->GetIndexAttribArray();
std::vector<int32_t> countArray = pRenderBatch->GetCountElementArray(indexArray);
glMultiDrawElements(GL_TRIANGLES, &countArray[0], GL_UNSIGNED_INT, (const GLvoid **)&indexArray[0], countArray.size());
[...]
Here's the resulting display (screenshot omitted):
However, according to the OpenGL documentation (http://www.opengl.org/sdk/docs/man3/xhtml/glMultiDrawElements.xml), the function 'glMultiDrawElements' has the following signature:
void glMultiDrawElements(GLenum mode, const GLsizei * count, GLenum type, const GLvoid ** indices, GLsizei primcount);
I don't understand why the display is not correct. Can anyone help me?

std::vector<std::vector<int32_t>> indexArray
glMultiDrawElements() expects indices to be an array of pointers to index data, one pointer per draw. It doesn't know anything about the layout of std::vector<int32_t>, so casting &indexArray[0] to const GLvoid ** passes it vector objects rather than raw pointers.
Try this instead:
std::vector< int32_t* > indexArray2
You can construct it like this:
std::vector< int32_t* > indexArray2( indexArray.size() );
for( size_t i = 0; i < indexArray.size(); ++i )
{
indexArray2[i] = &indexArray[i][0];
}
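With indexArray2 in place, the call itself stays almost the same; it now receives an array of raw index pointers (a sketch based on the snippets above):
glMultiDrawElements(GL_TRIANGLES, &countArray[0], GL_UNSIGNED_INT, (const GLvoid **)&indexArray2[0], countArray.size());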
EDIT: Complete example:
#include <GL/glew.h>
#include <GL/glut.h>
#include <vector>
using namespace std;
void display()
{
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
glOrtho( -2, 2, -2, 2, -1, 1 );
glMatrixMode( GL_MODELVIEW );
glLoadIdentity();
GLfloat verts[] =
{
0, 0,
1, 0,
1, 1,
0, 0,
-1, 0,
-1, -1,
};
vector< GLsizei > counts;
vector< vector< unsigned int > > indexes;
{
vector< unsigned int > index;
index.push_back( 0 );
index.push_back( 1 );
index.push_back( 2 );
counts.push_back( index.size() );
indexes.push_back( index );
}
{
vector< unsigned int > index;
index.push_back( 3 );
index.push_back( 4 );
index.push_back( 5 );
counts.push_back( index.size() );
indexes.push_back( index );
}
// Build the array of raw index pointers that glMultiDrawElements expects.
vector< unsigned int* > indexes2( indexes.size() );
for( size_t i = 0; i < indexes.size(); ++i )
{
indexes2[i] = &indexes[i][0];
}
glEnableClientState( GL_VERTEX_ARRAY );
glVertexPointer( 2, GL_FLOAT, 0, verts );
glMultiDrawElements
(
GL_TRIANGLES,
&counts[0],
GL_UNSIGNED_INT,
(const GLvoid **)&indexes2[0],
counts.size()
);
glutSwapBuffers();
}
int main( int argc, char **argv )
{
glutInit( &argc, argv );
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
glutInitWindowSize( 600, 600 );
glutCreateWindow( "GLUT" );
glewInit();
glutDisplayFunc( display );
glutMainLoop();
return 0;
}

Related

How do I make cube faces opaque in OpenGL?

I'm writing a program to draw a cube in OpenGL and rotate it continuously on mouse clicks. At particular angles, I'm able to see through the cube (it appears transparent). I've enabled the depth test, so I don't know why this is happening; I am not sure if I have enabled it correctly.
#include <math.h>
#include <vector>
#include <Windows.h>
#include <gl\glut.h>
using namespace std;
void myInit() {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glClearColor(0, 0, 0, 1);
glOrtho(-2, 2, -2, 2, 2, -2);
glMatrixMode(GL_MODELVIEW);
}
float Cube[][3] = { {-1, -1, -1}, {1, -1, -1}, {1, 1, -1}, {-1, 1, -1}, {-1, -1, 1}, {1, -1, 1}, {1, 1, 1}, {-1, 1, 1} };
float Colors[][3] = { {0, 0, 0}, {1, 0, 0}, {0, 1, 0}, {0, 0, 1}, {1, 1, 0}, {0, 1, 1}, {1, 0, 1}, {1, 1, 1} };
int axis = 0, theta[3] = {0, 0, 0};
void draw_face (int a, int b, int c, int d) {
glBegin(GL_QUADS);
glColor3fv(Colors[a]);
glVertex3fv(Cube[a]);
glColor3fv(Colors[b]);
glVertex3fv(Cube[b]);
glColor3fv(Colors[c]);
glVertex3fv(Cube[c]);
glColor3fv(Colors[d]);
glVertex3fv(Cube[d]);
glEnd();
}
void draw_cube () {
draw_face(0, 3, 2, 1);
draw_face(2, 3, 7, 6);
draw_face(0, 4, 7, 3);
draw_face(1, 2, 6, 5);
draw_face(4, 5, 6, 7);
draw_face(0, 1, 5, 4);
}
void spin_cube() {
theta[axis] += 2;
if (theta[axis] > 360)
theta[axis] = -360;
glutPostRedisplay();
}
void idle_func() {
Sleep(10);
spin_cube();
}
void mouse_func(int button, int state, int x, int y) {
if (button == GLUT_LEFT_BUTTON && state == GLUT_DOWN)
axis = 0;
else if (button == GLUT_MIDDLE_BUTTON && state == GLUT_DOWN)
axis = 1;
else if (button == GLUT_RIGHT_BUTTON && state == GLUT_DOWN)
axis = 2;
}
void myDrawing() {
glClear(GL_COLOR_BUFFER_BIT);
glPushMatrix();
glRotatef(theta[0], 1, 0, 0);
glRotatef(theta[1], 0, 1, 0);
glRotatef(theta[2], 0, 0, 1);
draw_cube();
glPopMatrix();
glFlush();
glutSwapBuffers();
}
int main(int argc, char *argv[]) {
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB);
glEnable(GL_DEPTH_TEST);
glutInitWindowSize(500, 500);
glutInitWindowPosition(0, 0);
glutCreateWindow("sample");
glutDisplayFunc(myDrawing);
glutIdleFunc(idle_func);
glutMouseFunc(mouse_func);
myInit();
glutMainLoop();
}
Multiple issues:
You aren't requesting a depth buffer from GLUT:
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB);
Solution: OR in GLUT_DEPTH to make sure GLUT requests some depth buffer bits from the OS during GL context creation:
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH);
You're calling glEnable(GL_DEPTH_TEST) before GLUT has created a GL context:
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB);
// no GL context yet
glEnable(GL_DEPTH_TEST);
Solution: Move the glEnable() to after glutCreateWindow() so it has a current GL context to work with:
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH);
glutInitWindowSize(500, 500);
glutInitWindowPosition(0, 0);
glutCreateWindow("sample");
glEnable(GL_DEPTH_TEST);
...
You never clear the depth buffer:
void myDrawing() {
// where's GL_DEPTH_BUFFER_BIT?
glClear(GL_COLOR_BUFFER_BIT);
...
Solution: OR in GL_DEPTH_BUFFER_BIT to your glClear() argument:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
All the fixes together:
#include <cmath>
#include <vector>
#include <Windows.h> // for Sleep() in idle_func
#include <GL/glut.h>
using namespace std;
void myInit() {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glClearColor(0, 0, 0, 1);
glOrtho(-2, 2, -2, 2, 2, -2);
glMatrixMode(GL_MODELVIEW);
}
float Cube[][3] = { {-1, -1, -1}, {1, -1, -1}, {1, 1, -1}, {-1, 1, -1}, {-1, -1, 1}, {1, -1, 1}, {1, 1, 1}, {-1, 1, 1} };
float Colors[][3] = { {0, 0, 0}, {1, 0, 0}, {0, 1, 0}, {0, 0, 1}, {1, 1, 0}, {0, 1, 1}, {1, 0, 1}, {1, 1, 1} };
int axis = 0, theta[3] = {0, 0, 0};
void draw_face (int a, int b, int c, int d) {
glBegin(GL_QUADS);
glColor3fv(Colors[a]);
glVertex3fv(Cube[a]);
glColor3fv(Colors[b]);
glVertex3fv(Cube[b]);
glColor3fv(Colors[c]);
glVertex3fv(Cube[c]);
glColor3fv(Colors[d]);
glVertex3fv(Cube[d]);
glEnd();
}
void draw_cube () {
draw_face(0, 3, 2, 1);
draw_face(2, 3, 7, 6);
draw_face(0, 4, 7, 3);
draw_face(1, 2, 6, 5);
draw_face(4, 5, 6, 7);
draw_face(0, 1, 5, 4);
}
void spin_cube() {
theta[axis] += 2;
if (theta[axis] > 360)
theta[axis] = -360;
glutPostRedisplay();
}
void idle_func() {
Sleep(10);
spin_cube();
}
void mouse_func(int button, int state, int x, int y) {
if (button == GLUT_LEFT_BUTTON && state == GLUT_DOWN)
axis = 0;
else if (button == GLUT_MIDDLE_BUTTON && state == GLUT_DOWN)
axis = 1;
else if (button == GLUT_RIGHT_BUTTON && state == GLUT_DOWN)
axis = 2;
}
void myDrawing() {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glPushMatrix();
glRotatef(theta[0], 1, 0, 0);
glRotatef(theta[1], 0, 1, 0);
glRotatef(theta[2], 0, 0, 1);
draw_cube();
glPopMatrix();
glFlush();
glutSwapBuffers();
}
int main(int argc, char *argv[]) {
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH);
glutInitWindowSize(500, 500);
glutInitWindowPosition(0, 0);
glutCreateWindow("sample");
glEnable(GL_DEPTH_TEST);
glutDisplayFunc(myDrawing);
glutIdleFunc(idle_func);
glutMouseFunc(mouse_func);
myInit();
glutMainLoop();
}
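As an aside, Sleep() is Windows-specific. A portable variant drops the idle function entirely and drives the animation with glutTimerFunc() instead (a sketch; the callback name is illustrative):
void timer_func( int )
{
    spin_cube();                        // advances theta[axis] and posts a redisplay
    glutTimerFunc( 10, timer_func, 0 ); // re-arm for roughly 10 ms later
}
// in main(), replace glutIdleFunc(idle_func); with:
// glutTimerFunc( 0, timer_func, 0 );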

How to convert text to a pixel array in Qt

I'm a new user of Qt. Is it possible to convert, for example, one character or string to an array where 0 means a white pixel and 1 means a black pixel? For example, when I have the character "a" saved as a QString (or any representation of a font), I would like to get the following array:
int array[9][6] = { //"a" char array
{0, 0, 0, 0, 0, 0}, //- - - - - -
{0, 1, 1, 1, 1, 0}, //- * * * * -
{1, 0, 0, 0, 0, 1}, //* - - - - *
{0, 0, 0, 0, 0, 1}, //- - - - - *
{0, 1, 1, 1, 1, 1}, //- * * * * *
{1, 0, 0, 0, 0, 1}, //* - - - - *
{1, 0, 0, 0, 1, 1}, //* - - - * *
{0, 1, 1, 1, 0, 1}, //- * * * - *
{0, 0, 0, 0, 0, 0}, //- - - - - -
};
Is there any way in Qt to reach that goal? I hope I have described my problem as clearly as possible and someone can help me with this.
One very important thing: the array size must depend on the selected font size, so a 30 pt font should, for example, produce an appropriately bigger array than a 10 pt font.
Thank you very much.
Best regards.
You can draw text onto a bitmap image, which will give you a bitmap containing the text. That is a pixel array, for all intents and purposes.
Note that you will also have to use a "bitmap font"; regular fonts won't really do the trick, as most have been created to utilize antialiasing and won't result in a crisp, readable bitmap image.
Qt also offers font metrics, which can give you a clear idea how big a piece of text or an individual character is.
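For instance, here is a minimal sketch (Qt 5; the font family, point size, and file name are arbitrary choices) that sizes the image from the metrics, so a 30 pt font yields a proportionally bigger array than a 10 pt one:
#include <QApplication>
#include <QFont>
#include <QFontMetrics>
#include <QImage>
#include <QPainter>

int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    QFont font("Monospace", 30);        // arbitrary family and point size
    QFontMetrics fm(font);
    // Size the image from the metrics, so the resulting array scales
    // with the selected font size.
    QImage image(fm.width('a'), fm.height(), QImage::Format_Mono);
    image.fill(0);
    {
        QPainter painter(&image);
        painter.setFont(font);
        painter.setPen(Qt::white);
        painter.drawText(0, fm.ascent(), "a"); // baseline sits at the ascent
    }
    image.save("a.png");                // inspect the result
    return 0;
}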
I misinterpreted the question on my first read through... well, here is what was asked for...
#include <QApplication>
#include <QPixmap>
#include <QLabel>
#include <QPainter>
#include <QDebug>
#include <QVector>
#include <QFontMetrics>
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
QSize size;
// QPixmap pixmap(100,100);
QImage image(20,20, QImage::Format_Mono);// Change the width and height here as needed if your letters get too big
image.fill(0);
{
QPainter painter(&image);
//// alternative to fill method
// painter.setBrush(Qt::black);
// painter.drawRect(0,0,image.width(), image.height());
painter.setPen(Qt::white);
QFontMetrics fm(painter.font());
size.setHeight(fm.height());
size.setWidth(fm.width('a'));
painter.drawText(0,fm.height(),"a");
}
QLabel label;
label.setPixmap(QPixmap::fromImage(image).scaled(400,400, Qt::KeepAspectRatio));
label.show();
QVector <QVector <int> > array(size.height(), QVector < int> (size.width(), 0));
for(int r = 0; r < size.height(); r++)
for(int c = 0; c < size.width(); c++)
array[r][c] = (image.pixelColor(c,r) == Qt::white ? 1 : 0);
// qDebug() << size << array;
foreach(QVector <int> row, array)
qDebug() << row;
return a.exec();
}
Output:
QVector(0, 0, 0, 0, 0, 0)
QVector(0, 0, 0, 0, 0, 0)
QVector(0, 0, 0, 0, 0, 0)
QVector(0, 0, 0, 0, 0, 0)
QVector(0, 0, 0, 0, 0, 0)
QVector(0, 0, 0, 0, 0, 0)
QVector(0, 0, 0, 0, 0, 0)
QVector(0, 1, 1, 1, 0, 0)
QVector(0, 0, 0, 0, 1, 0)
QVector(0, 1, 1, 1, 1, 0)
QVector(1, 0, 0, 0, 1, 0)
QVector(1, 0, 0, 0, 1, 0)
QVector(0, 1, 1, 1, 1, 0)

Use uniform buffer of uvec3[]

I have the following uniform buffer:
uniform EDGE_ID_TO_START_POS{
uint[12*3] pos;
} edgeIdToStartPos;
Writing to this buffer works perfectly fine:
#define EDGE_ID_TO_START_POS_SIZE (12*3)
const uint32_t edgeIdToStartPos_constBuffer[EDGE_ID_TO_START_POS_SIZE] = {
/* 0*/ 0, 0, 0, /* 1*/ 0, 1, 0, /* 2*/ 1, 0, 0, /* 3*/ 0, 0, 0,
/* 4*/ 0, 0, 1, /* 5*/ 0, 1, 1, /* 6*/ 1, 0, 1, /* 7*/ 0, 0, 1,
/* 8*/ 0, 0, 0, /* 9*/ 0, 1, 0, /*10*/ 1, 1, 0, /*11*/ 1, 0, 0
};
glBindBuffer(GL_UNIFORM_BUFFER, ubo);
glBufferData(GL_UNIFORM_BUFFER, EDGE_ID_TO_START_POS_SIZE * sizeof(uint32_t), edgeIdToStartPos_constBuffer, GL_STATIC_DRAW);
glBindBuffer(GL_UNIFORM_BUFFER, 0);
Now I want to change the uniform buffer to the following structure:
uniform EDGE_ID_TO_START_POS{
uvec3[12] pos;
} edgeIdToStartPos;
This structure makes more sense and is easier to use. And it should have the same memory layout as the previous version (?).
However, when reading from this struct, only the vectors 0..3 contain data. All uvecs with an index of 4 or higher contain zeros.
Why does this happen and how can I initialise such a uniform buffer correctly?
(C/C++, glew, glfw)
A uvec3 has a base alignment of 16 bytes, so you must add padding. See the OpenGL 4.5 spec, section 7.6.2.2 ("Standard Uniform Block Layout"):
If the member is a three-component vector with components consuming N basic machine units, the base alignment is 4N.
Here, N is 4 bytes (a uint), so both the base alignment and the array stride come out to 16 bytes: every uvec3 element of the array occupies four uints, the last of which is padding.
Here is how it would look:
static const int EDGE_ID_TO_START_POS_SIZE = 12 * 4;
const uint32_t edgeIdToStartPos_constBuffer[EDGE_ID_TO_START_POS_SIZE] = {
/* 0*/ 0, 0, 0, 0, /* 1*/ 0, 1, 0, 0, /* 2*/ 1, 0, 0, 0, /* 3*/ 0, 0, 0, 0,
/* 4*/ 0, 0, 1, 0, /* 5*/ 0, 1, 1, 0, /* 6*/ 1, 0, 1, 0, /* 7*/ 0, 0, 1, 0,
/* 8*/ 0, 0, 0, 0, /* 9*/ 0, 1, 0, 0, /*10*/ 1, 1, 0, 0, /*11*/ 1, 0, 0, 0,
};
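If you prefer the padding to be explicit in the type system, you can mirror the layout with a struct instead (a sketch; the struct and variable names are illustrative, and static_assert assumes C++11):
struct UVec3Padded {
    uint32_t x, y, z;
    uint32_t pad; // the array stride is 16 bytes, so one uint of padding
};
static_assert(sizeof(UVec3Padded) == 16, "must match the uniform block's array stride");

const UVec3Padded edgeIdToStartPos_constBuffer2[12] = {
    /* 0*/ {0, 0, 0, 0}, /* 1*/ {0, 1, 0, 0}, /* 2*/ {1, 0, 0, 0}, /* 3*/ {0, 0, 0, 0},
    /* 4*/ {0, 0, 1, 0}, /* 5*/ {0, 1, 1, 0}, /* 6*/ {1, 0, 1, 0}, /* 7*/ {0, 0, 1, 0},
    /* 8*/ {0, 0, 0, 0}, /* 9*/ {0, 1, 0, 0}, /*10*/ {1, 1, 0, 0}, /*11*/ {1, 0, 0, 0},
};
glBufferData(GL_UNIFORM_BUFFER, sizeof(edgeIdToStartPos_constBuffer2), edgeIdToStartPos_constBuffer2, GL_STATIC_DRAW);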

How can I properly pass sprites to a std::vector without destroying their texture?

From the official SFML tutorials, The White Box Problem:
"When you set the texture of a sprite, all it does internally is store a pointer to the texture instance. Therefore, if the texture is destroyed or moves elsewhere in memory, the sprite ends up with an invalid texture pointer." Thus a sprite without a texture will be seen.
I have a class called World. In this class I made a 2D integer array called level and a vector of type Block called blocks. I wanted to store the 'Block' objects inside the vector whenever level[i][j] == 1.
header file of the 'World' class:
#ifndef WORLD_H
#define WORLD_H
#include <vector>
#include "Block.h"
#include "Grass.h"
#include <SFML/Graphics.hpp>
using namespace std;
class World
{
public:
World();
void draw(sf::RenderWindow *window);
vector<Block> blocks;
private:
int level[12][16];
int wd;
int hi;
};
#endif // WORLD_H
cpp file of the 'World' class:
#include "World.h"
#include "Grass.h"
#include "Block.h"
#include <iostream>
#include <SFML/Graphics.hpp>
using namespace std;
World::World() : level{
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1},
{1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1},
{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1},
{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1},
{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1},
{1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1},
{1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1},
{1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1},
{1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1},
{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1},
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}
}
{
wd = 16;
hi = 12;
int count = 1;
//make a 'Block' object and pass it in the vector when level[ i ][ j ] = 1.
for(int i = 0; i<hi; i++)
{
for(int j = 0; j<wd; j++)
{
if(level[i][j] == 1)
{
Block block(j*50, i*50);
blocks.push_back(block);
}
}
}
}
void World::draw(sf::RenderWindow *window)
{
for(unsigned int i = 0; i<blocks.size(); i++)
{
blocks[i].draw(window);
}
}
The 'Block' class has two members: sf::Texture blockT and sf::Sprite block. It also has a draw(RenderWindow *window) method. This is how the 'Block' class is made:
header file for the 'Block' class:
#ifndef BLOCK_H
#define BLOCK_H
#include <iostream>
#include <SFML/Graphics.hpp>
using namespace std;
class Block
{
public:
Block(float x, float y);
void draw(sf::RenderWindow *window);
private:
sf::Texture blockT;
sf::Sprite block;
};
#endif // BLOCK_H
cpp file for the 'Block' class:
#include "Block.h"
#include <iostream>
#include <SFML/Graphics.hpp>
using namespace std;
Block::Block(float px, float py)
{
if(!(blockT.loadFromFile("textures/block.png")))
{
cout<<"Could not load block texture."<<endl;
}
block.setTexture(blockT);
block.setPosition(sf::Vector2f(px, py));
cout<<px<<endl;
cout<<py<<endl;
}
void Block::draw(sf::RenderWindow *window)
{
window->draw(block);
}
When I run the program, only white boxes are shown in place of the blocks. I don't understand how the texture is getting destroyed. This is what the output looks like (screenshot omitted):
As you can see, the white areas are sprites, each of size 50x50, without any texture.
You should customise copy-construction of blocks so it updates the texture pointer, something like:
Block::Block(const Block& other)
: blockT(other.blockT), block(other.block)
{
block.setTexture(blockT);
}
That will be used when push_back() into the vector forces a resize: the newly allocated, larger buffer's elements are copy-constructed from the old elements before the latter are destroyed and deallocated.
It would be a good idea to support the same kind of update for assignment, i.e. operator=(const Block& rhs).
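A sketch of that, following the same pattern as the copy constructor above:
Block& Block::operator=( const Block& rhs )
{
    blockT = rhs.blockT;
    block = rhs.block;
    block.setTexture( blockT ); // re-point the sprite at this object's own copy
    return *this;
}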
Regarding your "Thus a sprite without a texture will be seen.": it's much more likely that the behaviour is undefined, since you're effectively following a pointer into released memory that could be overwritten with new content at any time. It merely happens to manifest as a missing texture in your current testing; it might crash and burn at some other optimisation level, after some minor code changes, on another compiler or OS, etc.
Your solution of having a Block class which stores its own instance of an sf::Texture is going to cause you to have duplicate copies of textures hanging around in memory. It's also going to require you to learn and follow the rule of three when you're dealing with your Block objects as per Tony D's answer.
The simpler solution is to have a separate std::map of filenames to sf::Textures into which you can load the textures that you require once, and retrieve everywhere you need them.
// Have one of these, maybe as a member of your World class?
std::map<std::string,sf::Texture> textures;
// load your textures into it ...
Then in your Block class ...
class Block
{
public:
// constructor now takes a reference to a texture map ...
Block(float x, float y,std::map<std::string,sf::Texture>& textures);
And in the implementation you can retrieve the texture you want by its filename and assign it to the sprite with setTexture.
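For example, the constructor might then look like this (a sketch; it assumes the texture was already loaded into the map under the key "textures/block.png", and that Block drops its own sf::Texture member):
Block::Block(float px, float py, std::map<std::string, sf::Texture>& textures)
{
    // The sprite points at the texture stored in the long-lived map,
    // so copying or moving Block objects can no longer invalidate it.
    block.setTexture(textures.at("textures/block.png"));
    block.setPosition(sf::Vector2f(px, py));
}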

How to make action on keyboard input be instant

I am writing in C++ and using OpenGL in Windows.
I've created a cube and I want it to rotate around the y axis (using glRotatef(), not gluLookAt()) when the '4' or '6' numpad key is pressed.
The problem is that when I press and hold a key, there is a slight rotation, then a pause for a while, and then it starts rotating continuously.
Is there any way to get an immediate, continuous rotation using glutKeyboardFunc()?
If not, how can it be achieved?
That initial pause is the operating system's key-repeat delay. Instead of rotating directly in the key callback, maintain your own key state map and trigger redraws on a timer:
// g++ main.cpp -o main -lglut -lGL -lGLU && ./main
#include <GL/glut.h>
#include <map>
bool paused = false;
std::map< unsigned char, bool > state;
void keyboard_down( unsigned char key, int x, int y )
{
if( key == 'p' )
paused = !paused;
state[ key ] = true;
}
void keyboard_up( unsigned char key, int x, int y )
{
state[ key ] = false;
}
void display()
{
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
double w = glutGet( GLUT_WINDOW_WIDTH );
double h = glutGet( GLUT_WINDOW_HEIGHT );
double ar = w / h;
glOrtho( -2 * ar, 2 * ar, -2, 2, -1, 1);
glMatrixMode( GL_MODELVIEW );
glLoadIdentity();
static float angle = 0;
if( state[ '6' ] ) angle -= 3;
if( state[ '4' ] ) angle += 3;
if( !paused )
angle += 0.5;
glPushMatrix();
glRotatef( angle, 0, 0, 1 );
glColor3ub( 255, 0, 0 );
glBegin( GL_QUADS );
glVertex2i( -1, -1 );
glVertex2i( 1, -1 );
glVertex2i( 1, 1 );
glVertex2i( -1, 1 );
glEnd();
glPopMatrix();
glutSwapBuffers();
}
void timer( int extra )
{
glutPostRedisplay();
glutTimerFunc( 16, timer, 0 );
}
int main( int argc, char **argv )
{
glutInit( &argc, argv );
glutInitDisplayMode( GLUT_RGBA | GLUT_DEPTH | GLUT_DOUBLE );
glutInitWindowSize( 640, 480 );
glutCreateWindow( "Keyboard" );
glutDisplayFunc( display );
glutKeyboardFunc( keyboard_down );
glutKeyboardUpFunc( keyboard_up );
glutTimerFunc( 0, timer, 0 );
glutIgnoreKeyRepeat( GL_TRUE );
glutMainLoop();
return 0;
}