I mean for the following code to draw a horizontal line across the screen. Instead, it draws the line plus a second line that fades off toward the origin. A picture is posted below.
I think that the critical pieces of code are
float vertices[] =
{
-0.5, 0.7, 1, 1, 1,
0.5, 0.7, 1, 1, 1
};
and
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
while (not glfwWindowShouldClose(window))
{
glClearColor(0,0,0,1);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_LINE_STRIP, 0, sizeof(vertices)/sizeof(float));
glfwSwapBuffers(window);
glfwPollEvents();
}
The full code is
# include <GL/glew.h>
# include <GLFW/glfw3.h>
const GLchar * vertex_shader_source =
"\
# version 150 core\n\
in vec2 position;\
in vec3 color;\
out vec3 Color;\
void main() { Color = color; gl_Position = vec4(position, 0, 1); }\
";
const GLchar * fragment_shader_source =
"\
# version 150 core\n\
in vec3 Color;\
out vec4 outColor;\
void main() { outColor = vec4(Color, 1.0); }\
";
float vertices[] =
{
-0.5, 0.7, 1, 1, 1,
0.5, 0.7, 1, 1, 1
};
int main (int argc, char ** argv)
{
// ---- INITIALIZE STUFF ---- //
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
GLFWwindow * window = glfwCreateWindow(800, 600, "open-gl", nullptr, nullptr);
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
glewInit();
// ---- MAKE SHADERS ---- //
GLuint vertex_shader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex_shader, 1, &vertex_shader_source, NULL);
glCompileShader(vertex_shader);
GLuint fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment_shader, 1, &fragment_shader_source, NULL);
glCompileShader(fragment_shader);
GLuint shader_program = glCreateProgram();
glAttachShader(shader_program, vertex_shader);
glAttachShader(shader_program, fragment_shader);
glBindFragDataLocation(shader_program, 0, "outColor");
glLinkProgram(shader_program);
glUseProgram(shader_program);
// ---- MAKE VERTEX BUFFER OBJECT ---- //
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
// ---- MAKE VERTEX ARRAY OBJECT ---- //
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
GLint posAttrib = glGetAttribLocation(shader_program, "position");
GLint colAttrib = glGetAttribLocation(shader_program, "color");
glEnableVertexAttribArray(posAttrib);
glEnableVertexAttribArray(colAttrib);
glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 5*sizeof(float), 0);
glVertexAttribPointer(colAttrib, 3, GL_FLOAT, GL_FALSE, 5*sizeof(float), (void*)(2*sizeof(float)));
// ---- DO OTHER THINGS ---- //
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
while (not glfwWindowShouldClose(window))
{
glClearColor(0,0,0,1);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_LINE_STRIP, 0, sizeof(vertices)/sizeof(float));
glfwSwapBuffers(window);
glfwPollEvents();
}
// ---- CLEAN UP ---- //
glDeleteProgram(shader_program);
glDeleteShader(vertex_shader);
glDeleteShader(fragment_shader);
glDeleteVertexArrays(1, &vao);
glDeleteBuffers(1, &vbo);
glfwTerminate();
return 0;
}
The output is: [screenshot: the horizontal line, plus a second line fading off toward the origin]
I have no idea what the problem is; I have searched the internet, but I cannot find anyone who has had a similar problem. The best that I have found is someone who said that OpenGL implementations do not tend to do lines very well. This does not happen with GL_LINES, however.
I am using OpenGL 3.2 with GLFW and GLEW. I have an Acer Aspire v5-571P-6648; I do not know specifically what model of graphics card it has, but I can look for it.
Your last argument to
glDrawArrays(GL_LINE_STRIP, 0, sizeof(vertices)/sizeof(float));
is wrong: the count is the number of vertices to draw, not the number of floats in the array, so here it should be 2 (see https://www.opengl.org/sdk/docs/man/html/glDrawArrays.xhtml). With five floats per vertex, your expression evaluates to 10, so OpenGL fetches eight vertices past the end of your data; those out-of-range fetches typically come back as zeros, which is exactly the extra line fading to black toward the origin.
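If you would rather keep the count derived from the array, a minimal sketch that divides by the full vertex stride instead (assuming the five-floats-per-vertex layout above):
glDrawArrays(GL_LINE_STRIP, 0, sizeof(vertices) / (5 * sizeof(float))); // 10 floats / 5 per vertex = 2 vertices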
I wrote code that should show a triangle, but it doesn't. I really don't know where the problem is; I have checked everything and cannot find out what it is. Please help me if you can.
Here is the code:
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <glm.hpp>
int width = 800, height = 600;
int main() {
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 5);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow* window = glfwCreateWindow(width, height, "engine", NULL, NULL);
glfwMakeContextCurrent(window);
gladLoadGLLoader((GLADloadproc)glfwGetProcAddress);
glViewport(0, 0, width, height);
const char* VertexShaderData = "#version 450 core\n"
"layout(location = 0) in vec2 pos;\n"
"void main(){\n"
"gl_Position = vec4(pos, 0, 1);\n"
"}\0";
GLuint VertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(VertexShader, 1, &VertexShaderData, NULL);
glCompileShader(VertexShader);
const char* FragmentShaderData = "#version 450 core\n"
"out vec4 Color;\n"
"void main(){\n"
"Color = vec4(0.25, 0.8, 0.65, 1);\n"
"}\0";
GLuint FragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(FragmentShader, 1, &FragmentShaderData, NULL);
glCompileShader(FragmentShader);
GLuint ShaderProgram = glCreateProgram();
glAttachShader(ShaderProgram, VertexShader);
glAttachShader(ShaderProgram, FragmentShader);
glLinkProgram(ShaderProgram);
float Points[] = { -1, -1, 0, 1, 1, -1 };
GLuint VAO, VBO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2*sizeof(float), (void*)0);
glBufferData(GL_ARRAY_BUFFER, 6, Points, GL_STATIC_DRAW);
glClearColor(0.3, 0.5, 0.7, 1);
while (!glfwWindowShouldClose(window)) {
glfwSwapBuffers(window);
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(VAO);
glEnableVertexAttribArray(0);
glUseProgram(ShaderProgram);
glDrawArrays(GL_TRIANGLES, 0, 3);
}
}
At first I believed the problem was in the array, but now I don't think so; it must be something else.
The 2nd argument to glBufferData is the size of the buffer in bytes, not the number of elements. Change
glBufferData(GL_ARRAY_BUFFER, 6, Points, GL_STATIC_DRAW);
to
glBufferData(GL_ARRAY_BUFFER, 6*sizeof(float), Points, GL_STATIC_DRAW);
As written, only 6 bytes (one and a half floats) are uploaded, so the triangle's vertex data never makes it into the buffer.
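One way to avoid this class of mistake is to let the compiler compute the byte size from the array itself; a sketch assuming Points stays a plain C array in the same scope (so sizeof sees the whole array rather than a decayed pointer):
glBufferData(GL_ARRAY_BUFFER, sizeof(Points), Points, GL_STATIC_DRAW); // sizeof(Points) == 6 * sizeof(float) == 24 bytes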
My OpenGL project can't display any object anymore. I tried to remake everything from scratch, but it still doesn't work.
Main Code
#include <vector>
#include <iostream>
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <imgui.h>
#include <imgui_impl_glfw_gl3.h>
#include "Loader.h"
void on_error(int error, const char* description)
{
std::cout << "GLFW error " << error << " : \"" << description << "\"" << std::endl;
}
int main()
{
//Init glfw
glfwSetErrorCallback(on_error);
if (!glfwInit()) return -1;
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
//Init window
auto window = glfwCreateWindow(1920, 1080, "gl_Crane", NULL, NULL);
if (!window) { glfwTerminate(); return -1; }
glfwMakeContextCurrent(window);
//Init glew
glewExperimental = true;
if (glewInit() != GLEW_OK) { glfwTerminate(); return -1; }
//Some opengl options
glEnable(GL_DEPTH_TEST);
glEnable(GL_DEBUG_OUTPUT);
glDepthFunc(GL_LESS);
//glEnable(GL_CULL_FACE);
//matrices
std::vector<glm::vec3> vertices = {
{-.2f, -.2f, 0}, {0, .2f, 0}, {.2f, -.2f, 0}
};
std::vector<glm::vec3> colors = {
{1, 0, 0}, {0, 1, 0}, {0, 0, 1}
};
std::vector<GLushort> indexes = {
0, 1, 2
};
//vertexArray
GLuint vertex_array;
glGenVertexArrays(1, &vertex_array);
glBindVertexArray(vertex_array);
//vertexbuffer
GLuint vertex_buffer;
glGenBuffers(1, &vertex_buffer);
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(glm::vec3) * vertices.size(), vertices.data(), GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(glm::vec3), nullptr);
//colorbuffer
GLuint color_buffer;
glGenBuffers(1, &color_buffer);
glBindBuffer(GL_ARRAY_BUFFER, color_buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(glm::vec3) * colors.size(), colors.data(), GL_STATIC_DRAW);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(glm::vec3), nullptr);
//indexbuffer
GLuint index_buffer;
glGenBuffers(1, &index_buffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, index_buffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLushort) * indexes.size(), indexes.data(), GL_STATIC_DRAW);
glBindVertexArray(0);
//Init shader
auto shader_program = new ShaderProgram;
shader_program->initFromFiles("../Crane/simple.vert", "../Crane/simple.frag");
//shader_program->addUniform("MVP");
ImGui_ImplGlfwGL3_Init(window, true);
glfwSwapInterval(1);
while (!glfwWindowShouldClose(window))
{
ImGui_ImplGlfwGL3_NewFrame();
ImGui::Text("Application average %.3f ms/frame (%.1f FPS)", 1000.0f / ImGui::GetIO().Framerate, ImGui::GetIO().Framerate);
//maj viewport
int display_w, display_h;
glfwGetFramebufferSize(window, &display_h, &display_w);
glViewport(0, 0, display_w, display_h);
//clear screen
glClearColor(.2f, .2f, .2f, 0);
glClear(GL_COLOR_BUFFER_BIT);
//draw stuff
shader_program->use();
glBindVertexArray(vertex_array);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
//auto mvp = glm::mat4(1);
//glUniformMatrix4fv(shader_program->uniform("MVP"), 1, GL_FALSE, glm::value_ptr(mvp));
glDrawElements(GL_TRIANGLES, indexes.size(), GL_UNSIGNED_SHORT, nullptr);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glBindVertexArray(0);
shader_program->disable();
ImGui::Render();
glfwSwapBuffers(window);
glfwPollEvents();
}
shader_program->disable();
ImGui_ImplGlfwGL3_Shutdown();
glfwTerminate();
return 0;
}
Fragment shader
#version 430
in vec3 fColors;
out vec4 fragColors;
void main()
{
fragColors = vec4(fColors, 1.0);
}
Vertex shader
#version 430
layout (location = 0) in vec4 vertexPosition;
layout (location = 1) in vec3 vertexColor;
out vec3 fColors;
void main()
{
fColors = vertexColor;
gl_Position = vertexPosition;
}
Additionally, I use the shader loader from here:
r3dux shader loader
In your program the depth test is enabled (glEnable(GL_DEPTH_TEST)).
The depth of a fragment is stored in a separate buffer. This buffer has to be cleared at the beginning of every frame too, just as you do with the color buffer. See glClear:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
Of course, if you disabled the depth test, then you would "see" the triangle, too.
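For reference, a minimal sketch of the corrected lines inside your render loop; nothing else needs to move:
glClearColor(.2f, .2f, .2f, 0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // clear the depth buffer along with the color buffer every frame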
I have written a simple OpenGL program to draw a triangle on the screen. I have debugged with glGetError(), and there is now no error in the code, but when I try to run it, only a black screen comes up.
Here is my code; I am using GLFW for window creation.
#include<glew.h>
#include<glfw3.h>
#include<stdio.h>
int main(int argc, char ** argv)
{
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
GLFWwindow* window = glfwCreateWindow(800, 600, "OpenGL", nullptr, nullptr);
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
glewInit();
float vertices[] = {
0.0f, 0.5f,
0.5f, -0.5f,
-0.5f, -0.5f
};
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
const GLchar * vs =
"#version 150\n"
"in vec2 position;\n"
"void main() {\n"
"vec4 gl_Position = vec4( position , 0.0 , 1.0 );\n"
"}";
const GLchar * fs =
"#version 150\n"
"out vec4 out_color; \n"
"void main() { \n"
"out_color = vec4(1.0, 1.0, 1.0, 1.0);\n"
"}";
GLuint vsh = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vsh, 1, &vs, NULL);
glCompileShader(vsh);
GLint status;
glGetShaderiv(vsh, GL_COMPILE_STATUS, &status);
if (status == GL_TRUE) printf("Vertex Shader Compiled success\n");
GLuint fsh = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fsh, 1, &fs, NULL);
glCompileShader(fsh);
glGetShaderiv(fsh, GL_COMPILE_STATUS, &status);
if (status == GL_TRUE) printf("Fragment Shader Compiled success\n");
GLuint sp = glCreateProgram();
glAttachShader(sp, vsh);
glAttachShader(sp, fsh);
glBindFragDataLocation(sp, 0, "out_color");
glBindAttribLocation(sp,1,"position");
glLinkProgram(sp);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(1);
glUseProgram(sp);
while (!glfwWindowShouldClose(window))
{
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
return 0;
}
Update
I have narrowed down the issue to just one line of code
GLint pos = glGetAttribLocation(sp, "position"); // sp is the shader program
The problem is that it returns -1. I have read in tutorials that if you don't use a variable, it gets optimized out by the compiler. I have used position in the code, so why is it getting thrown away? Below is my vertex shader.
const GLchar * vs =
"#version 150\n"
"in vec2 position;\n"
"void main() {\n"
"vec4 gl_Position = vec4( position , 0.0 , 1.0 );\n"
"}";
Just add the following two lines right after your
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
call:
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
That is, use location 0 instead of 1.
Hope this helps.
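Note also why glGetAttribLocation returns -1 in the first place: the shader line vec4 gl_Position = vec4( position , 0.0 , 1.0 ); declares a new local variable named gl_Position instead of assigning the built-in output, so position never influences the shader's result and the linker optimizes it away. A sketch of the vertex shader with that line corrected:
const GLchar * vs =
"#version 150\n"
"in vec2 position;\n"
"void main() {\n"
"gl_Position = vec4( position , 0.0 , 1.0 );\n" // assign the built-in, do not redeclare it
"}";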
I'm following open.gl and I have gotten to the rendering stage but my triangle does not appear on the screen. Only a black box.
I'm on Win7, latest drivers and everything, VS2013.
To make it easy I put the code here:
#define GLEW_STATIC
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
using namespace std;
int main() {
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
GLFWwindow* window = glfwCreateWindow(800, 600, "OpenGL", nullptr, nullptr);
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
glewInit();
float vertices[] = {
0.0f, 0.5f,
0.5f, -0.5f,
-0.5f, -0.5f
};
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
const char* vertexShaderCode =
"#version 150\n"
"in vec2 position;"
"void main() {"
" gl_Position = vec4(position, 0.0, 1.0);"
"}";
const char* fragmentShaderCode =
"#version 150\n"
"out vec4 outColor;"
"void main() {"
" outColor = vec4(1.0, 1.0, 1.0, 1.0);"
"}";
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderCode, NULL);
glCompileShader(vertexShader);
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentShaderCode, NULL);
glCompileShader(fragmentShader);
GLuint shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glBindFragDataLocation(shaderProgram, 0, "outColor");
glLinkProgram(shaderProgram);
glUseProgram(shaderProgram);
GLint posAttrib = glGetAttribLocation(shaderProgram, "position");
glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(posAttrib);
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
while(!glfwWindowShouldClose(window)) {
glfwPollEvents();
glfwSwapBuffers(window);
glDrawArrays(GL_TRIANGLES, 0, 3);
}
glfwTerminate();
}
Bind the VAO before you specify the vertex layout:
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
GLint posAttrib = glGetAttribLocation(shaderProgram, "position");
glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(posAttrib);
Check all GLFW calls for failure. For debugging, check glGetError after every OpenGL command. Also make sure your shaders compiled correctly, and print the error log if they did not.
Lastly, check the viewport.
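As a debugging aid, here is a minimal sketch of such a glGetError check (the helper name checkGLError is purely illustrative, and it assumes <iostream> is included as in the code above):
void checkGLError(const char* label) {
    GLenum err;
    // drain every pending error flag, since OpenGL can queue more than one
    while ((err = glGetError()) != GL_NO_ERROR)
        std::cerr << "GL error 0x" << std::hex << err << std::dec << " after " << label << "\n";
}
// usage: glDrawArrays(GL_TRIANGLES, 0, 3); checkGLError("glDrawArrays");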
I'm trying to render a simple test shader with the OpenGL 3.3 core profile, but all I get is a black window.
GLFWwindow* window;
GLuint vao;
GLuint vbo[2];
GLuint program;
const GLfloat square[8] = {
-1.0, -1.0,
-1.0, 1.0,
1.0, 1.0,
1.0, -1.0
};
const GLfloat indices[4] = { 0, 1, 2, 3 };
Init the OpenGL core context and window:
if( !glfwInit() ) {
std::cerr << "Failed to initialize GLFW\n";
return -1;
}
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
// Open a window and create its OpenGL context
window = glfwCreateWindow( 1024, 768, "", 0, 0);
if( window == NULL ) {
std::cerr << "Failed to open GLFW window.\n";
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
// Initialize GLEW
if (gl3wInit()) {
std::cerr << "Failed to initialize GLEW" << std::endl;
return -1;
}
if (!gl3wIsSupported(3, 3)) {
std::cerr << "OpenGL Version 3.3 not supported" << std::endl;
return -1;
}
Init the VBO and its index buffer, then the VAO and the shader program, and bind the vertex shader input to location 0:
glGenBuffers(2, vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo[0]);
glBufferData(GL_ARRAY_BUFFER, 8*sizeof(GLfloat), square, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, vbo[1]);
glBufferData(GL_ARRAY_BUFFER, 4*sizeof(GLushort), indices, GL_STATIC_DRAW);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
program = glCreateProgram();
GLuint vertex_shader, fragment_shader;
loadShader_FILE(vertex_shader, "shader/default.vsh", GL_VERTEX_SHADER);
glAttachShader(program, vertex_shader);
loadShader_FILE(fragment_shader, "shader/default.fsh", GL_FRAGMENT_SHADER);
glAttachShader(program, fragment_shader);
glBindAttribLocation(program, 0, "pos");
glLinkProgram(program);
Start rendering:
glUseProgram(program);
glBindBuffer(GL_ARRAY_BUFFER, vbo[0]);
glVertexAttribPointer(
0,
2,
GL_FLOAT,
GL_FALSE,
sizeof(GLfloat)*2,
(void*)0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo[1]);
glDrawElements(
GL_TRIANGLE_STRIP,
4,
GL_UNSIGNED_SHORT,
(void*)0);
glDisableVertexAttribArray(0);
glfwSwapBuffers(window);
Vertex shader
#version 330
in vec2 pos;
out vec2 c;
void main(void)
{
gl_Position = vec4(pos, 0.0, 1.0);
c = (pos+1)*0.5;
}
Fragment shader
#version 330
in vec2 c;
out vec4 color;
void main(void)
{
color = vec4(c, 1, 1);
}
The shaders compile without errors, and apitrace can't find any OpenGL errors.
Well, this cannot be good:
const GLfloat indices[4] = { 0, 1, 2, 3 };
You told OpenGL that those were unsigned shorts, but they are floating-point. Never mind that GLfloat is twice the size of GLushort; the way those numbers are represented is completely different. Floating-point vertex indices do not make much sense.
Instead, you should use:
const GLushort indices[4] = { 0, 1, 2, 3 };
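With GLushort indices, the buffer upload and the draw call line up as well; a short sketch of the matching calls from the code above, unchanged except for the element type:
const GLushort indices[4] = { 0, 1, 2, 3 };
glBufferData(GL_ARRAY_BUFFER, 4*sizeof(GLushort), indices, GL_STATIC_DRAW); // now uploads 8 bytes of real GLushort data
glDrawElements(GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, (void*)0); // GL_UNSIGNED_SHORT matches GLushort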