Can't draw Bresenham line, OpenGL - C++

I am implementing the Bresenham algorithm, and I am fairly certain my implementation is correct, but the screen stays blank, so I think I'm using OpenGL incorrectly.
This is my full program. I am using nupengl.core 0.1.0.1, if that helps:
#include <GL\glew.h>
#include <GL\freeglut.h>
#include <iostream>
#include <fstream>
#include <math.h>
#define Round(a) (int)(a+0.5)
#define max(a,b) (((a)>(b))?(a):(b))
using namespace std;
void init(void) {
glClearColor(1.0, 0.0, 0.0, 1.0);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluOrtho2D(0.0, 200.0, 0.0, 150.0);
}
class Shape {
public:
virtual void Draw() = 0;
};
class lineBressenham : public Shape {
int x1, y1, x2, y2;
public:
lineBressenham(int a1, int b1, int a2, int b2) {
x1 = a1;
y1 = b1;
x2 = a2;
y2 = b2;
}
void Draw() {
int dx = x2 - x1;
int dy = y2 - y1;
int x = x1;
int y = y1;
int p = 2 * dy - dx;
glBegin(GL_POINTS);
while (x < x2) {
if (p >= 0) {
glVertex2i(x, y);
y = y + 1;
p = p + 2 * dy - 2 * dx;
}
else {
glVertex2i(x, y);
p = p + 2 * dy;
}
x = x + 1;
}
glEnd();
}
};
void displayWindow(void) {
ifstream infile;
glClear(GL_COLOR_BUFFER_BIT); // clear the color buffer
glColor3f(0.0, 0.0, 0.0);
glPointSize(3.0);
while (!infile.eof()) {
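// note: infile is never opened, so eof() never becomes true and this loop never exits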
int a;
int x1 = 3, y1 = 4, x2 = 7, y2 = 9;
Shape* shape;
shape = new lineBressenham(x1, y1, x2, y2);
shape->Draw();
glutSwapBuffers();
//glFlush(); // alternative: flush everything drawn in one go
}
}
int main(int argc, char** argv) {
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
glutInitWindowSize(500, 500); // window size
glutInitWindowPosition(0, 0); // distance from the top-left screen
glutCreateWindow("Show the window"); // message displayed on top bar window
glewInit();
if (glewIsSupported("GL_VERSION_3_3")) {
std::cout << " GLEW Version is 3.3\n ";
}
else {
std::cout << "GLEW 3.3 not supported\n ";
}
glutDisplayFunc(displayWindow);
init();
glutMainLoop();
return 0;
}

I fixed it. The problem was that I never set up the projection matrix with glMatrixMode and gluOrtho2D. The full code is:
#include <GL\glew.h>
#include <GL\freeglut.h>
#include <iostream>
#include <fstream>
#include <math.h>
GLsizei windows_width = 800;
GLsizei windows_height = 600;
class lineBressenham {
int x1, y1, x2, y2;
public:
lineBressenham(int a1, int b1, int a2, int b2) {
x1 = a1;
y1 = b1;
x2 = a2;
y2 = b2;
}
void Draw() {
int dx = x2 - x1;
int dy = y2 - y1;
int x = x1;
int y = y1;
int p = 2 * dy - dx;
glBegin(GL_POINTS);
while (x < x2) {
if (p >= 0) {
glVertex2i(x, y);
y = y + 1;
p = p + 2 * dy - 2 * dx;
}
else {
glVertex2i(x, y);
p = p + 2 * dy;
}
x = x + 1;
}
glEnd();
}
};
void displayWindow(void) {
glClear(GL_COLOR_BUFFER_BIT); // clear the color buffer
glColor3f(1.0, 0.3, 0.7);
glPointSize(1.0);
int x1 = 30, y1 = 40, x2 = 700, y2 = 300;
lineBressenham *shape = new lineBressenham(x1, y1, x2, y2);
shape->Draw();
glutSwapBuffers();
glFlush();
}
void init() {
// Reset coordinate system
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluOrtho2D(0.0, windows_width, windows_height, 0.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
}
int main(int argc, char** argv) {
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
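// note: GLUT_SINGLE requests a single-buffered window, so the glutSwapBuffers() in displayWindow effectively acts as a flush; request GLUT_DOUBLE for true double buffering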
glutInitWindowSize(500, 500); // window size
glutInitWindowPosition(0, 0); // distance from the top-left screen
glutCreateWindow("Bressenham"); // message displayed on top bar window
glLoadIdentity();
init();
glewInit(); // GLEW must be initialized once a context exists, before glewIsSupported can be used
if (glewIsSupported("GL_VERSION_3_3")) {
std::cout << " GLEW Version is 3.3\n ";
}
else {
std::cout << "GLEW 3.3 not supported\n ";
}
glutDisplayFunc(displayWindow);
glutMainLoop();
return 0;
}

Related

Bresenham's algorithm in OpenGL

This is my first time studying computer graphics, and I was given the code below by my professor. I think I have understood how the algorithm works in general, but what I fail to understand is how the incx, incy, inc1, inc2 lines work. I can see they are there to increment something, but since I haven't fully understood the algorithm I'm kind of baffled. This is the full code:
#include <gl/glut.h>
#include <stdio.h>
int xstart, ystart, xend, yend;
void myInit() {
glClear(GL_COLOR_BUFFER_BIT);
glClearColor(0.0, 0.0, 0.0, 1.0);
glMatrixMode(GL_PROJECTION);
gluOrtho2D(0, 500, 0, 500);
}
void draw_pixel(int x, int y) {
glBegin(GL_POINTS);
glVertex2i(x, y);
glEnd();
}
void draw_line(int xstart, int xend, int ystart, int yend) {
int dx, dy, i, e;
int incx, incy, inc1, inc2;
int x,y;
dx = xend-xstart;
dy = yend-ystart;
if (dx < 0) dx = -dx;
if (dy < 0) dy = -dy;
incx = 1;
if (xend < xstart) incx = -1;
incy = 1;
if (yend < ystart) incy = -1;
x = xstart;
y = ystart;
if (dx > dy) {
draw_pixel(x, y);
e = 2 * dy-dx;
inc1 = 2*(dy-dx);
inc2 = 2*dy;
for (i=0; i<dx; i++) {
if (e >= 0) {
y += incy;
e += inc1;
}
else
e += inc2;
x += incx;
draw_pixel(x, y);
}
} else {
draw_pixel(x, y);
e = 2*dx-dy;
inc1 = 2*(dx-dy);
inc2 = 2*dx;
for (i=0; i<dy; i++) {
if (e >= 0) {
x += incx;
e += inc1;
}
else
e += inc2;
y += incy;
draw_pixel(x, y);
}
}
}
void myDisplay() {
draw_line(xstart, xend, ystart, yend);
glFlush();
}
void main(int argc, char **argv) {
printf( "Enter (xstart, ystart, xend, yend)\n");
scanf("%d %d %d %d", &xstart, &ystart, &xend, &yend);
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE|GLUT_RGB);
glutInitWindowSize(500, 500);
glutInitWindowPosition(0, 0);
glutCreateWindow("Bresenham's Line Algorithm Visualization");
myInit();
glutDisplayFunc(myDisplay);
glutMainLoop();
}
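In short (this is a reading of the code above, not from the professor's notes): e is Bresenham's scaled integer error term. In the dx > dy branch it starts at e = 2*dy - dx; each x step moves the ideal line up by dy/dx of a pixel, so the error grows by inc2 = 2*dy per step. Once e >= 0, the true line has drifted at least half a pixel from the current row, so y is stepped by incy and the error is rebased with an extra -2*dx, giving inc1 = 2*(dy - dx) = inc2 - 2*dx. incx and incy are just the +1/-1 step directions that let the same loop handle lines heading in any direction, and the dx > dy test picks whether x or y is the driving axis; the else branch is the mirror image with the roles of x and y swapped.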

I tried to run OpenGL, but the GLUT screen is just rendering a white screen?

I have just tried to run the Bresenham function, which draws a line between 2 vertices, but only the white GLUT screen appears; there is no line at all. Here is my source code, can anybody help me?
#include <math.h>
#include <windows.h>
#include <GL/glut.h>
#include <GL/gl.h>
void init(void)
{
glClearColor(1.0, 1.0, 1.0, 0.0);
glMatrixMode(GL_PROJECTION);
gluOrtho2D(0.0, 500.0, 0.0, 500.0);
}
void Bresenham(int x1, int y1, int x2, int y2) {
int Dx = abs(x2 - x1);
int Dy = abs(y2 - y1);
int p = 2 * Dy - Dx;
int c1 = 2 * Dy;
int c2 = 2 * (Dy - Dx);
int x = x1;
int y = y1;
int x_unit = 1, y_unit = 1;
glVertex2d(x, y);
while (x != x2) {
if (p<0) p += c1;
else {
p += c2;
y += y_unit;
}
x += x_unit;
glVertex2d(x, y);
}
}
void display(void)
{
glClear(GL_COLOR_BUFFER_BIT);
glBegin(GL_POLYGON);
Bresenham(50, 150, 300, 200);
glEnd();
glFlush();
}
int main(int iArgc, char** cppArgv) {
glutInit(&iArgc, cppArgv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
glutInitWindowSize(600, 600);
glutInitWindowPosition(50, 50);
glutCreateWindow("Line");
init();
int x1=50, y1=150, x2=300, y2=200;
glutDisplayFunc(display);
glutMainLoop();
return EXIT_SUCCESS;
}
Make sure your background color doesn't match your drawing color. Kinda hard to see white-on-white :)
Drawing the line in red works on my machine:
#include <GL/glut.h>
void Bresenham(int x1, int y1, int x2, int y2)
{
int Dx = abs(x2 - x1);
int Dy = abs(y2 - y1);
int p = 2 * Dy - Dx;
int c1 = 2 * Dy;
int c2 = 2 * (Dy - Dx);
int x = x1;
int y = y1;
int x_unit = 1, y_unit = 1;
glVertex2d(x, y);
while (x != x2)
{
if (p<0)
p += c1;
else
{
p += c2;
y += y_unit;
}
x += x_unit;
glVertex2d(x, y);
}
}
void display(void)
{
glClearColor(1.0, 1.0, 1.0, 0.0);
glClear(GL_COLOR_BUFFER_BIT);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0, 500.0, 0.0, 500.0, -1, 1);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
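// note: GL_POLYGON fills the outline formed by every vertex Bresenham emits;
// GL_POINTS would plot the individual pixels the algorithm computes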
glBegin(GL_POLYGON);
glColor3ub( 255, 0, 0 );
Bresenham(50, 150, 300, 200);
glEnd();
glutSwapBuffers();
}
int main(int argc, char** argv )
{
glutInit( &argc, argv );
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB);
glutInitWindowSize(600, 600);
glutCreateWindow("Line");
glutDisplayFunc(display);
glutMainLoop();
return 0;
}

Determining whether a user-specified point is inside a 2D polygon

I've done some research, and there are many methods for doing this when the vertices and points are predetermined, as seen here. In my case, however, everything is specified by the user. The code I have put together (with the help of others) lets the user create the polygon and place points. I wrote functions to create vectors from the point to the vertices and compute the angles between them. If the angles sum to 360 the point should be inside and colored green; otherwise it is outside and colored red.
This is what I've been working on, but I can't seem to figure it out:
(Edit: included my entire code)
GLint vert[100][2];
int width = 400, height = 600, n = 0, m = 0, type = GL_LINE_STRIP, v;
bool rubberbanding = false;
bool closed = false;
double testx, testy;
bool isitin;
double dotp(double x1, double y1, double x2, double y2) {
double a;
a = (x1 * x2) + (y1 * y2);
return a;
}
double mag(double x1, double y1, double x2, double y2) {
double a;
double x = (x2 - x1);
double y = (y2 - y1);
a = sqrt((x*x) + (y*y));
return a;
}
bool inpoly(int numv, GLint vx[][2], GLint vy[][2], double tx, double ty) {
double angle = 0.0;
int n = 0;
while (n != numv) {
//vector from point to vertex
double newv1x = vx[n][0] - tx;
double newv1y = vy[n][1] - ty;
double magv1 = mag(tx, ty, vx[n][0], vy[n][1]); //magnitude of vector
//vector from point to next vertex
double newv2x = vx[n + 1][0] - tx;
double newv2y = vy[n + 1][1] - ty;
double magv2 = mag(tx, ty, vx[n+1][0], vy[n+1][1]);//magnitude of vector
//dot product between the two vectors
double dp = dotp(newv1x, newv1y, newv2x, newv2y);
//angle between two vectors
double vang = acos(dp / (magv1*magv2));
angle += vang;
n++;
}
//vector from point to last vertex
double newv1x = vx[numv][0] - tx;
double newv1y = vy[numv][1] - ty;
double magv1 = mag(tx, ty, vx[numv][0], vy[numv][1]); //magnitude of vector
//vector from point to first vertex
double newv2x = vx[0][0] - tx;
double newv2y = vy[0][1] - ty;
double magv2 = mag(tx, ty, vx[0][0], vy[0][1]);//magnitude of vector
//dot product between the two vectors
double dp = dotp(newv1x, newv1y, newv2x, newv2y);
//angle between two vectors
double vang = acos(dp / (magv1*magv2));
angle += vang;
if (angle == 360.0) return true;
return false;
}
void display(){
glClear(GL_COLOR_BUFFER_BIT);
glColor3f(1, 1, 0);
glBegin(closed ? GL_LINE_LOOP : GL_LINE_STRIP);
for(int i = 0; i < m; i++){
glVertex2iv(vert[i]);
}
glEnd();
/*
glColor3f(0, 0, 1);
glBegin(GL_POINTS);
for (int i = m; i < n; i++) {
glVertex2iv(vert[i]);
}
*/
isitin = inpoly(m, vert, vert, testx, testy);
if (isitin == true) {
glColor3f(0, 1, 0);
glBegin(GL_POINTS);
for (int i = m; i < n; i++) {
glVertex2iv(vert[i]);
}
}
else {
glColor3f(1, 0, 0);
glBegin(GL_POINTS);
for (int i = m; i < n; i++) {
glVertex2iv(vert[i]);
}
}
glEnd();
glutSwapBuffers();
glutPostRedisplay();
}
void keyboard(unsigned char key, int x, int y){
switch(key){
case 'r': n = 0; m = 0; closed = false; break;
case 'c': closed = true; break;
}
glutPostRedisplay();
}
int findVertex(int x, int y){
int dx, dy;
for(int i = 0; i < n; i++){
dx = vert[i][0] - x;
dy = vert[i][1] - y;
if(dx*dx + dy*dy < 16) return i;
}
return - 1;
}
void mousemove(int x, int y)
{
testx = x;
testy = height - 1 - y;
}
void mouse(int button, int state, int x, int y){
switch(button){
case GLUT_LEFT_BUTTON:
if(state == GLUT_DOWN){
if (n < 100) {
v = n++;
vert[v][0] = x;
vert[v][1] = height - 1 - y;
// n++;
rubberbanding = true;
glutPostRedisplay();
if (!closed) m = n;
}
}
else{
rubberbanding = false;
}
break;
}
}
void motion(int x, int y){
if(rubberbanding){
vert[v][0] = x;
vert[v][1] = height - 1 - y;
glutPostRedisplay();
}
}
void main(int argc, char** argv){
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGB|GLUT_DOUBLE);
glutInitWindowSize(width,height);
glutInitWindowPosition(50,100);
glutCreateWindow("Project 3");
glClearColor(0.0,0.0,0.0,0.0);
glColor3f( 1, 1, 0);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluOrtho2D(0, width, 0, height);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glutDisplayFunc(display);
glutKeyboardFunc(keyboard);
glutMouseFunc(mouse);
glutPassiveMotionFunc(mousemove);
glutMotionFunc(motion);
glutMainLoop();
}
When I run the program in Visual Studio, I can draw the polygon and I can specify points, but for some reason all the points appear red. If anyone has any ideas on how to fix this, it would be greatly appreciated.
Probably your error is that acos returns radians, while you're testing whether the sum equals 360 degrees.
Also, you shouldn't compare doubles with == like that, since the calculation accumulates rounding error with each addition. See here for more information.
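For illustration, here is a sketch of the angle-sum test with both fixes applied: the angles are summed in radians with their sign (via atan2), and the result is compared against 2*pi with slack for rounding instead of an exact == test. The signature is simplified and the names are made up for the example; this is not the original function:
#include <cmath>
// Returns true when (tx, ty) lies inside the simple polygon vert[0..numv-1].
// The signed angles subtended by each edge sum to about +/-2*pi for a point
// inside and to about 0 outside, so testing against pi tolerates rounding.
bool insidePolygon(int numv, const int vert[][2], double tx, double ty) {
    const double kPi = 3.14159265358979323846;
    double angle = 0.0;
    for (int i = 0; i < numv; i++) {
        int j = (i + 1) % numv; // next vertex, wrapping back to the first
        double ax = vert[i][0] - tx, ay = vert[i][1] - ty;
        double bx = vert[j][0] - tx, by = vert[j][1] - ty;
        // signed angle between the two point-to-vertex vectors
        angle += atan2(ax * by - ay * bx, ax * bx + ay * by);
    }
    return fabs(angle) > kPi;
}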

How to draw a Bezier curve with C++ in OpenGL using floating point values

I am trying to draw a Bezier curve in OpenGL using floating-point values. I have tried many different code examples. My current code below runs, but does not show the curve on screen. The usual way to draw Bezier curves is with integer values, which means using the gluOrtho2D() function to set up the projection. But I want to draw a curve using floating-point values, with x in the range (-1, 1) and y in the range (-1, 1); for example, if x = 500 in pixels, it should be mapped into (-1, 1), and likewise if y = 800.
I have already tried using integer values, and that worked for me. My code using integer values is below:
#include <GL/glut.h>
#include <math.h>
#include <stdio.h>
#define CTRL_COUNT 100
int ctrlPointsCount;
int ctrlPointsX[CTRL_COUNT], ctrlPointsY[CTRL_COUNT];
int X1[3]={20,25,20}, Y1[3]={5,24,38}; //first point(x1[0],y1[0]) second(x1[1],y1[1]) third(x1[2],y1[2])
void myInit()
{
glClearColor(0.0,0.0,0.0,0.0);
glColor3f(1.0,0.0,0.0);
glPointSize(8.0);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluOrtho2D(0.0,128.0,0.0,96.0);
}
//p(t)=(1-t)^3*p0+3t(1-t)^2*p1+3t^2(1-t)p2+t^3p3
float getNextBezierPointX(float t)
{
float x=0.0;
for(int i=0; i<ctrlPointsCount; i++)
{
int c;
if(i==0 || i==ctrlPointsCount-1)
c = 1;
else
{
c = ctrlPointsCount-1;
}
x += c * pow(t, i) * pow(1-t, ctrlPointsCount-1-i) * ctrlPointsX[i];
}
return x;
}
float getNextBezierPointY(float t)
{
float y=0.0;
for(int i=0; i<ctrlPointsCount; i++)
{
int c;
if(i==0 || i==ctrlPointsCount-1)
c = 1;
else
{
c = ctrlPointsCount-1;
}
y += c * pow(t, i) * pow(1-t, ctrlPointsCount-1-i) * ctrlPointsY[i];
}
return y;
}
void drawline()
{
// draw control points using red color
for(int i=0; i < 3; i++)
{
glBegin(GL_POINTS);
glVertex2i(ctrlPointsX[i], ctrlPointsY[i]);
glEnd();
glFlush();
}
// draw bezier curve using control points by calculating next points using cubic bezier curve formula
float oldX=ctrlPointsX[0], oldY=ctrlPointsY[0];
for(double t = 0.0;t <= 1.0; t += 0.01) {
float x = getNextBezierPointX(t);
float y = getNextBezierPointY(t);
//glColor3f(1.0,t,1.0);
glColor3f(1.0,1.0,1.0);
glBegin(GL_LINES);
glVertex2f(oldX, oldY);
glVertex2f(x, y);
glEnd();
glFlush();
oldX = x;
oldY = y;
}
}
void myDisplay()
{
glClear(GL_COLOR_BUFFER_BIT);
glColor3f(1.0,0.0,0.0);
ctrlPointsCount=3;
for(int i=0;i<3;i++)
{
ctrlPointsX[i] = X1[i];
ctrlPointsY[i] = Y1[i];
}
drawline();
glFlush();
}
int main(int argc, char *argv[])
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE|GLUT_RGB);
glutInitWindowSize(640,480);
glutInitWindowPosition(100,150);
glutCreateWindow("Bezier Curve");
glutDisplayFunc(myDisplay);
myInit();
glutMainLoop();
return 0;
}
But when I tried using floating-point values, it did not work for me; the curved line does not show on screen. My code using floating-point values is below:
#include <iostream>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <GL/glut.h>
using namespace std;
#define CTRL_COUNT 100
int ctrlPointsCount;
int ctrlPointsX[CTRL_COUNT], ctrlPointsY[CTRL_COUNT];
double X1[3] = { 0.26015037593985, 0.43609022556391, 0.6 }, Y1[3] = { 0.946875, 0.884375, 0.946875 };
//Initializes 3D rendering
void initRendering() {
glEnable(GL_DEPTH_TEST);
}
float getNextBezierPointX(float t)
{
float x = 0.0;
for (int i = 0; i<ctrlPointsCount; i++)
{
int c;
if (i == 0 || i == ctrlPointsCount - 1)
c = 1;
else
{
c = ctrlPointsCount - 1;
}
x += c * pow(t, i) * pow(1 - t, ctrlPointsCount - 1 - i) * ctrlPointsX[i];
}
return x;
}
float getNextBezierPointY(float t)
{
float y = 0.0;
for (int i = 0; i<ctrlPointsCount; i++)
{
int c;
if (i == 0 || i == ctrlPointsCount - 1)
c = 1;
else
{
c = ctrlPointsCount - 1;
}
y += c * pow(t, i) * pow(1 - t, ctrlPointsCount - 1 - i) * ctrlPointsY[i];
}
return y;
}
void drawline()
{
// draw control points using red color
for (int i = 0; i < 3; i++)
{
glBegin(GL_POINTS);
glVertex2i(ctrlPointsX[i], ctrlPointsY[i]);
glEnd();
glFlush();
}
// draw bezier curve using control points by calculating next points using cubic bezier curve formula
float oldX = ctrlPointsX[0], oldY = ctrlPointsY[0];
for (double t = 0.0; t <= 1.0; t += 0.01)
{
float x = getNextBezierPointX(t);
float y = getNextBezierPointY(t);
//glColor3f(1.0,t,1.0);
glColor3f(1.0, 1.0, 1.0);
glBegin(GL_LINES);
glVertex2f(oldX, oldY);
glVertex2f(x, y);
glEnd();
glFlush();
oldX = x;
oldY = y;
}
}
//Called when the window is resized
void handleResize(int w, int h) {
glViewport(0, 0, w, h);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(45.0, (double)w / (double)h, 1.0, 200.0);
}
float _angle = 0.0;
float _cameraAngle = 0.0;
float _ang_tri = 0.0;
//Draws the 3D scene
void drawScene() {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity(); //Reset the drawing perspective
ctrlPointsCount = 3;
for (int i = 0; i<3; i++)
{
ctrlPointsX[i] = X1[i];
ctrlPointsY[i] = Y1[i];
}
drawline();
glutSwapBuffers();
}
void update(int value) {
_angle += 2.0f;
if (_angle > 360) {
_angle -= 360;
}
_ang_tri += 2.0f;
if (_ang_tri > 360) {
_ang_tri -= 360;
}
glutPostRedisplay(); //Tell GLUT that the display has changed
//Tell GLUT to call update again in 25 milliseconds
glutTimerFunc(25, update, 0);
}
int main(int argc, char** argv) {
//Initialize GLUT
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH);
glutInitWindowSize(1331, 641);
glutInitWindowPosition(0, 0);
//Create the window
glutCreateWindow("Our cg project");
initRendering();
//Set handler functions
glutDisplayFunc(drawScene);
glutReshapeFunc(handleResize);
glutTimerFunc(25, update, 0); //Add a timer
glClearColor(0.0, 0.7, 1.5,0.0);
glutMainLoop();
return 0;
}
The problem is here:
int ctrlPointsX[CTRL_COUNT], ctrlPointsY[CTRL_COUNT];
double X1[3] = { 0.26015037593985, 0.43609022556391, 0.6 }, Y1[3] = {0.946875, 0.884375, 0.946875 };
for (int i = 0; i<3; i++)
{
ctrlPointsX[i] = X1[i];
ctrlPointsY[i] = Y1[i];
}
ctrlPointsX and ctrlPointsY can only hold integer values. So when you do ctrlPointsX[i] = X1[i] and ctrlPointsY[i] = Y1[i], you are converting the doubles to integers, which truncates them. So all your control points will be 0.
You have to declare the control-point arrays as type double too:
double ctrlPointsX[CTRL_COUNT], ctrlPointsY[CTRL_COUNT];
double X1[3] = { 0.26015037593985, 0.43609022556391, 0.6 }, Y1[3] = { 0.946875, 0.884375, 0.946875 };
This should fix the truncated values. Note that drawline still plots the control points with glVertex2i, which truncates its arguments as well, so those calls should become glVertex2d; and since gluPerspective puts the near plane at 1.0, geometry left at z = 0 is liable to be clipped, so a 2D projection such as gluOrtho2D(-1.0, 1.0, -1.0, 1.0) is likely a better fit for this drawing.
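As an aside, the pixel-to-[-1, 1] mapping the question asks about is just a linear rescale. A minimal sketch, with the 1331x641 window size taken from the posted code and a made-up function name:
#include <cstdio>
// Rescale a pixel coordinate in [0, size) to the [-1, 1] range.
double toNdc(double pixel, double size) {
    return 2.0 * pixel / size - 1.0;
}
int main() {
    printf("x = 500 -> %f\n", toNdc(500.0, 1331.0)); // about -0.249
    printf("y = 800 -> %f\n", toNdc(800.0, 641.0));  // about 1.496: above the window, so outside the range
    return 0;
}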

Why doesn't this midpoint line algorithm draw vertical lines (or diagonals that approach the vertical)?

I implemented this algorithm I found online, and it draws horizontal lines and half of all diagonals just fine. But as a diagonal line passes the "halfway" mark toward vertical, it stops updating y and draws a line with a smaller slope. It does draw all the way to x2, but the line ends up sloped toward a different y2 coordinate. And if you try to draw a vertical line, nothing is drawn at all. Can anyone find the bug?
void init()
{
glClearColor(0.0, 0.0, 0.0, 0.0);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluOrtho2D(0.0, 500.0, 0.0, 500.0);
}
void midPoint(int x1, int y1, int x2, int y2)
{
if (x1 > x2)
{
midPoint(x2, y2, x1, y1);
return;
}
int slope;
int dx, dy, d, x, y;
dx = x2 - x1;
dy = y2 - y1;
d = dx - 2 * dy;
y = y1;
if (dy < 0) {
slope = -1;
dy = -dy;
}
else {
slope = 1;
}
for (x = x1; x < x2; x++) {
glBegin(GL_POINTS);
glVertex2f(x, y);
if (d <= 0) {
d += 2 * dx - 2 * dy;
y += slope;
}
else {
d += -2 * dy;
}
glEnd();
}
}
void display()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glColor3f(1.0, 0.0, 0.0);
midPoint(10, 10, 110, 210);
glColor3f(0.0, 1.0, 0.0);
midPoint(10, 10, 210, 110);
glColor3f(1.0, 1.0, 0.0);
midPoint(210, 10, 10, 110);
glFlush();
}
int main(int argc, char *argv[])
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
glutInitWindowPosition(50, 50);
glutInitWindowSize(500, 500);
glutCreateWindow("Bresenham\'s midpoint line algorithm");
init();
glutDisplayFunc(display);
glutMainLoop();
return 0;
}
Bresenham's line drawing algorithm, in its basic x-driven form, can't plot vertical lines.
To plot vertical lines you could use the DDA line drawing algorithm instead, but the problem with DDA is that it relies on floating-point arithmetic and can accumulate rounding error.
Following Wikipedia's definition, the implementation would look like:
void midPoint(int x1, int y1, int x2, int y2)
{
bool steep = abs(y2 - y1) > abs(x2 - x1);
if(steep) {
// swap x1 and y1
int tmp = x1;
x1 = y1;
y1 = tmp;
// swap x2 and y2
tmp = x2;
x2 = y2;
y2 = tmp;
}
if(x1 > x2) {
// swap x1 and x2
int tmp = x1;
x1 = x2;
x2 = tmp;
// swap y1 and y2
tmp = y1;
y1 = y2;
y2 = tmp;
}
int dx, dy, error, ystep, x, y;
dx = x2 - x1;
dy = abs(y2 - y1);
error = dx / 2;
y = y1;
if(y1 < y2)
ystep = 1;
else
ystep = -1;
glBegin(GL_POINTS);
for (x = x1; x <= x2; x++) {
if(steep)
glVertex2f(y, x);
else
glVertex2f(x, y);
error -= dy;
if (error < 0) {
y += ystep;
error += dx;
}
}
glEnd();
}
(couldn't test yet but fixed in regard to aschepler's comment)
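With the steep-line swap in place, a vertical call such as the one below (coordinates are illustrative) should plot correctly:
midPoint(250, 10, 250, 490); // dx = 0 here, so the steep branch swaps axes and loops over y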
I was getting the same problem, and the solution seems quite simple: which axis drives the sampling loop is the key to getting vertical lines rendered correctly.
The code below is from my graphics project in Java; the z coordinate is for the z-buffer.
Notice that the loop runs dx times when sampling along the x-axis and dy times when sampling along the y-axis.
display.setColor(Color.BLACK);
dx=x2-x1;
dy=y2-y1;
if(dx<0)
xinc=-1;
else
xinc=1;
if(dy<0)
yinc=-1;
else
yinc=1;
zinc=z2-z1;
dx=Math.abs(dx);dy=Math.abs(dy);
if(dx>dy){
zinc/=dx;
dy=dy<<1;
p=dy-dx;
y2=dx;
dx=dx<<1;
display.drawPixel(x1, y1, z1);
for(j=0;j<y2;j++){
z1+=zinc;
if(p>=0){
p-=dx;
y1+=yinc;
}
x1+=xinc;
p+=dy;
display.drawPixel(x1, y1, z1);
}
}
else{
zinc/=dy;
dx=dx<<1;
p=dx-dy;
x2=dy;
dy=dy<<1;
display.drawPixel(x1, y1, z1);
for(j=0;j<x2;j++){
z1+=zinc;
if(p>=0){
p-=dy;
x1+=xinc;
}
y1+=yinc;
p+=dx;
//System.out.println("Pixel : "+x1+" ,"+y1);
display.drawPixel(x1, y1, z1);
}
}