Line not showing - C++

I have written this code to display a line segment, but I cannot understand why the line is not being displayed. Can someone please help?
#include<GL/glut.h>
#include<iostream>
void init()
{
glClearColor(0.0, 0.0, 0.0, 0.0);
glMatrixMode(GL_PROJECTION);
gluOrtho2D(0.0, 200.0, 0.0, 150.0);
}
void line_segment()
{
glClear(GL_COLOR_BUFFER_BIT);
glColor3f(1.0, 0.0, 0.0);
glBegin(GL_LINE);
glVertex2i(180, 15);
glVertex2i(10, 145);
glEnd();
glFlush();
}
int main(int argc, char** argv)
{
glutInit(&argc, argv);
// optional
glutInitWindowSize(400, 300);
glutInitWindowPosition(50, 100);
// Till here
glutCreateWindow("Window.....");
init();
glutDisplayFunc(line_segment);
// without the main loop the window is only displayed for a very short time
glutMainLoop();
}
OUTPUT: (screenshot of the window, showing only the black background with no line drawn)

GL_LINE is not a valid line primitive type. GL_LINE is a polygon mode (see glPolygonMode). The primitive type you want to use is GL_LINES:
Change
glBegin(GL_LINE);
to
glBegin(GL_LINES);
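With that one change, the asker's display callback would look like this (a minimal sketch; only the primitive type differs from the code in the question):
void line_segment()
{
    glClear(GL_COLOR_BUFFER_BIT);
    glColor3f(1.0, 0.0, 0.0);
    glBegin(GL_LINES);   // GL_LINES draws one segment per pair of vertices
    glVertex2i(180, 15);
    glVertex2i(10, 145);
    glEnd();
    glFlush();
}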

Related

I need to draw a point using OpenGL and GLUT, but I just get a blank black screen

I am trying to print a point using OpenGL and GLUT, but I just get a blank screen when I run the following code. Any help will be appreciated.
Thank you.
#include<GL/glut.h>
void display() {
glColor3f(1.0, 0.0, 0.0);
glBegin(GL_POINTS);
glVertex2f(0.0, 0.0);
glEnd();
glFlush();
}
int main(int argc, char** argv) {
glutInit(&argc, argv);
glutInitWindowSize(640, 480);
glutCreateWindow("example");
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
glutDisplayFunc(display);
glutMainLoop();
}
You need to call glPointSize before glBegin. Example:
void display() {
glColor3f(1.0, 0.0, 0.0);
glPointSize(10.0f);
glBegin(GL_POINTS);
glVertex2f(0.0f, 0.0f);
glEnd();
glFlush();
}
If the point size is going to be the same each frame then you can just call glPointSize once on initialisation.
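For example, a hypothetical init() helper, called once after glutCreateWindow() (so a GL context exists) and before glutMainLoop(), could set it; this helper is illustrative and not part of the original answer:
void init()
{
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);  // black background
    glPointSize(10.0f);                    // plain GL state; stays in effect for every frame
}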

OpenGL code doesn't show anything in window mode

I want to create a program with OpenGL on my PC. I am using this code:
#include "stdafx.h"
#include <GL/glut.h>
bool bFullsreen = false;
int nWindowID;
void display(void)
{
//Clear all pixels
glClear(GL_COLOR_BUFFER_BIT);
//draw white polygon (rectangle) with corners at
// (0.25, 0.25, 0.0) and (0.75, 0.75, 0.0)
glColor3f(1.0,1.0,1.0);
glBegin(GL_POLYGON);
glVertex3f(0.25, 0.25, 0.0);
glVertex3f(0.75, 0.25, 0.0);
glVertex3f(0.75, 0.75, 0.0);
glVertex3f(0.25, 0.75, 0.0);
glEnd();
// Don't wait, start processing buffered OpenGL routines
//glFlush();
glutSwapBuffers();
}
void init(void)
{
//select clearing (background) color
glClearColor(0.0, 0.0, 0.0, 0.0);
//initialize viewing values
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0, 1.0, 0.0, 1.0, -1.0, 1.0);
}
void keyboardFunc(unsigned char key, int x, int y) {
switch (key) {
case 'f':
bFullsreen = !bFullsreen;
if (bFullsreen)
glutFullScreen();
else {
glutSetWindow(nWindowID);
glutPositionWindow(100, 100);
glutReshapeWindow(640, 480);
}
break;
}
}
void idleFunc(void) {
glutPostRedisplay();
}
int _tmain(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitWindowSize(250,250);
glutInitWindowPosition(100,100);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH);
nWindowID = glutCreateWindow("Hello World");
init();
glutDisplayFunc(display);
glutKeyboardFunc(keyboardFunc);
glutIdleFunc(idleFunc);
glutMainLoop();
return 0;
}
When I run it on my PC it doesn't show anything and only shows the background, like this picture:
But when I switch to full-screen mode, it shows correctly. I searched and found suggestions to change GLUT_SINGLE to GLUT_DOUBLE and glFlush() to glutSwapBuffers(), but that doesn't work for me.
I have Windows 10 with an NVIDIA GeForce 7300 LE. When I run my program on another PC it runs correctly. So:
What is my problem? Is it my graphics card, or is my code incorrect?
And how can I fix it?

Losing some points while drawing points in OpenGL

I have written C++ code in OpenGL to draw seven vertices. My sample code is as follows:
#include <GL/glut.h>
void init2D(float r, float g, float b)
{
glClearColor(r, g, b, 0.0);
glMatrixMode(GL_PROJECTION);
gluOrtho2D(0.0, 200.0, 0.0, 150.0);
glPointSize(4.0);
}
void display(void)
{
glClear(GL_COLOR_BUFFER_BIT);
glColor3f(1.0, 0.0, 0.0);
//draw seven points
glBegin(GL_POINTS);
glVertex2i(30, 30);
glVertex2i(47, 76);
glVertex2i(76, 150);
glVertex2i(130, 240);
glVertex2i(300, 200);
glVertex2i(250, 50);
glVertex2i(60, 20);
glEnd();
glFlush();
}
int main(int argc, char *argv[])
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
glutInitWindowSize(500, 500);
glutInitWindowPosition(100, 100);
glutCreateWindow("points and lines");
init2D(0.0, 0.0, 0.0);
glutDisplayFunc(display);
glutMainLoop();
}
But when I run the code, I only get 4 of them. Is there anything wrong with the code? Is there any way to display all the points?
gluOrtho2D(0.0, 200.0, 0.0, 150.0);
You are setting up your projection such that (0,0) maps to the bottom left corner, and (200, 150) to the top right corner, hence
glVertex2i(130, 240);
glVertex2i(300, 200);
glVertex2i(250, 50);
are all outside of the view volume.
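One way to show all seven points is to enlarge the projection so it covers the coordinates actually used, for example (the exact bounds here are a choice, not part of the original answer):
void init2D(float r, float g, float b)
{
    glClearColor(r, g, b, 0.0);
    glMatrixMode(GL_PROJECTION);
    gluOrtho2D(0.0, 400.0, 0.0, 300.0);  // (130, 240), (300, 200) and (250, 50) now fall inside the view volume
    glPointSize(4.0);
}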

OpenGL flickering screen

I've written a simple OpenGL program running on my Ubuntu laptop. It is a small solar system including the sun and the earth, where the earth rotates around the sun. The problem with my program is that the screen keeps flickering continuously every time I run it.
#include <GL/glut.h>
#define SUN_RADIUS 0.4
#define EARTH_RADIUS 0.06
#define MOON_RADIUS 0.016
GLfloat EARTH_ORBIT_RADIUS = 0.9;
GLfloat year = 0.0;
void init() {
glClearColor(0.0, 0.0, 0.0, 0.0);
glClearDepth(10.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
}
void renderScene() {
gluLookAt(
0.0, 0.0, -4.0,
0.0, 0.0, 0.0,
0.0, 1.0, 0.0
);
glColor3f(1.0, 1.0, 0.7);
glutWireSphere(SUN_RADIUS, 50, 50);
glPushMatrix();
glRotatef(year, 0.0, 1.0, 0.0);
glTranslatef(EARTH_ORBIT_RADIUS, 0.0, 0.0);
glColor3f(0.0, 0.7, 1.0);
glutWireSphere(EARTH_RADIUS, 10, 10);
glPopMatrix();
}
void display() {
glClear(GL_COLOR_BUFFER_BIT);
renderScene();
glFlush();
glutSwapBuffers();
}
void idle() {
year += 0.2;
display();
}
int main(int argc, char** argv) {
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB);
glutInitWindowPosition(100, 100);
glutInitWindowSize(600, 600);
glutCreateWindow("Solar System");
init();
glutDisplayFunc(display);
glutIdleFunc(idle);
glutMainLoop();
}
gluLookAt() multiplies the current matrix; it does not load a new one. Multiple gluLookAt()s multiplied together aren't very meaningful.
Reload the projection/modelview matrices each frame; this helps prevent matrix oddities.
Let GLUT do its job: don't call display() from idle(), use glutPostRedisplay() instead. That way GLUT knows to call display() the next time through the event loop.
All together:
#include <GL/glut.h>
#define SUN_RADIUS 0.4
#define EARTH_RADIUS 0.06
#define MOON_RADIUS 0.016
GLfloat EARTH_ORBIT_RADIUS = 0.9;
GLfloat year = 0.0;
void renderScene()
{
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho( -1, 1, -1, 1, -100, 100 );
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt
(
0.0, 0.0, -4.0,
0.0, 0.0, 0.0,
0.0, 1.0, 0.0
);
glColor3f(1.0, 1.0, 0.7);
glutWireSphere(SUN_RADIUS, 50, 50);
glPushMatrix();
glRotatef(year, 0.0, 1.0, 0.0);
glTranslatef(EARTH_ORBIT_RADIUS, 0.0, 0.0);
glColor3f(0.0, 0.7, 1.0);
glutWireSphere(EARTH_RADIUS, 10, 10);
glPopMatrix();
}
void display()
{
glClearColor(0.0, 0.0, 0.0, 0.0);
glClearDepth(10.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
renderScene();
glutSwapBuffers();
}
void idle()
{
year += 0.2;
glutPostRedisplay();
}
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB);
glutInitWindowPosition(100, 100);
glutInitWindowSize(600, 600);
glutCreateWindow("Solar System");
glutDisplayFunc(display);
glutIdleFunc( idle );
glutMainLoop();
}
This may be due to tearing if you aren't doing any kind of v-sync (which it doesn't look like your code is doing). Try adding a sleep to your display method (like sleep(500)). This isn't the correct way to fix it, but it will allow you to verify that this is the issue. If it is, look into adding v-sync to your application.
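As a rough sketch of that test on the asker's Ubuntu system, based on the corrected display() from the answer above (note that POSIX sleep() takes whole seconds, so usleep() from <unistd.h> is used here for a sub-second delay; this is only a diagnostic, not a fix):
#include <unistd.h>   // usleep

void display()
{
    glClearColor(0.0, 0.0, 0.0, 0.0);
    glClearDepth(10.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    renderScene();
    glutSwapBuffers();
    usleep(500 * 1000);  // crude ~500 ms pause, just to see whether the flicker changes
}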

first OpenGL program from the Red book crashes when it returns

I read the "Red Book" (OpenGL Programming Guide) and tried the first program in the book under Windows 7 with VS2010.
The result appears normally, but when the program returns, it crashes with the following information:
Unhandled exception at 0x6992c660 in first.exe: 0xC0000005: Access violation.
I have tried some other programs, which exhibit the same problem.
The following is the code:
#include <stdio.h>
#include <gl/glut.h>
void display(void)
{
/* clear all pixels */
glClear(GL_COLOR_BUFFER_BIT);
/* draw white polygon (rectangle) with corners at
* (0.25, 0.25, 0.0) and (0.75, 0.75, 0.0)
*/
glColor3f(1.0, 1.0, 1.0);
glBegin(GL_POLYGON);
glVertex3f(0.25, 0.25, 0.0);
glVertex3f(0.75, 0.25, 0.0);
glVertex3f(0.75, 0.75, 0.0);
glVertex3f(0.25, 0.75, 0.0);
glEnd();
/* don't wait!
* start processing buffered OpenGL routines
*/
glFlush();
}
void init(void)
{
/* select clearing (background) color */
glClearColor(0.0, 0.0, 0.0, 0.0);
/* initialize viewing values */
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0, 1.0, 0.0, 1.0, -1.0, 1.0);
}
/*
* Declare initial window size, position, and display mode
* (single buffer and RGBA). Open window with "hello"
* in its title bar. Call initialization routines.
* Register callback function to display graphics.
* Enter main loop and process events.
*/
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
glutInitWindowSize(250, 250);
glutInitWindowPosition(100, 100);
glutCreateWindow("hello");
init();
glutDisplayFunc(display);
glutMainLoop();
return 0;
}