I made sample code using ImGui & VTK in Visual Studio 2019.
Reference sites: https://github.com/trlsmax/imgui-vtk & https://github.com/ocornut/imgui
VTK version: 9.2 (latest version)
I'm embedding a VTK viewer in ImGui, but I ran into the problems below.
======
code
#pragma once
#include <vtkAutoInit.h>
#define vtkRenderingCore_AUTOINIT 3(vtkRenderingOpenGL2,vtkInteractionStyle, vtkRenderingFreeType)
#define vtkRenderingContext2D_AUTOINIT 1(vtkRenderingContextOpenGL2)
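// Aside (not from the original post): when building against VTK 9 with CMake,
// vtk_module_autoinit(TARGETS <your_target> MODULES ${VTK_LIBRARIES}) in
// CMakeLists.txt generates the equivalent of these AUTOINIT defines, so the
// manual #defines above become unnecessary.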
#include <array> // for std::array used below
#include <vtkActor.h>
#include <vtkCylinderSource.h>
#include <vtkSmartPointer.h>
#include <vtkContourFilter.h>
#include <vtkMath.h>
#include <vtkNamedColors.h>
#include <vtkPointData.h>
#include <vtkPolyDataMapper.h>
#include <vtkProperty.h>
#include <vtkShortArray.h>
#include <vtkStructuredPoints.h>
static vtkSmartPointer<vtkActor> SetupDemoCylinder()
{
auto colors = vtkSmartPointer<vtkNamedColors>::New();
// Set the background color.
std::array<unsigned char, 4> bkg{ {26, 51, 102, 255} };
colors->SetColor("BkgColor", bkg.data());
// This creates a polygonal cylinder model with eight circumferential facets
// (i.e, in practice an octagonal prism).
auto cylinder = vtkSmartPointer<vtkCylinderSource>::New();
cylinder->SetResolution(8);
// The mapper is responsible for pushing the geometry into the graphics
// library. It may also do color mapping, if scalars or other attributes are
// defined.
auto cylinderMapper = vtkSmartPointer<vtkPolyDataMapper>::New();
cylinderMapper->SetInputConnection(cylinder->GetOutputPort());
// The actor is a grouping mechanism: besides the geometry (mapper), it
// also has a property, transformation matrix, and/or texture map.
// Here we set its color and rotate it around the X and Y axes.
auto cylinderActor = vtkSmartPointer<vtkActor>::New();
cylinderActor->SetMapper(cylinderMapper);
cylinderActor->GetProperty()->SetColor(colors->GetColor4d("Tomato").GetData());
cylinderActor->RotateX(30.0);
cylinderActor->RotateY(-45.0);
return cylinderActor;
}
main.cpp
auto actor = SetupDemoCylinder();
// Setup window
glfwSetErrorCallback(glfw_error_callback);
if (!glfwInit()) {
return 1;
}
// Use GL 3.2 (All Platforms)
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
// Decide GLSL version
#ifdef __APPLE__
// GLSL 150
const char* glsl_version = "#version 150";
#else
// GLSL 130
const char* glsl_version = "#version 130";
#endif
// Create window with graphics context
GLFWwindow* window = glfwCreateWindow(1280, 720, "Dear ImGui VTKViewer Example", NULL, NULL);
if (window == NULL) {
return 1;
}
glfwMakeContextCurrent(window);
glfwSwapInterval(1); // Enable vsync
// Initialize OpenGL loader
if (gl3wInit() != 0) {
fprintf(stderr, "Failed to initialize OpenGL loader!\n");
return 1;
}
// Setup Dear ImGui context
IMGUI_CHECKVERSION();
ImGui::CreateContext();
ImGuiIO& io = ImGui::GetIO(); (void)io;
io.ConfigFlags |= ImGuiConfigFlags_DockingEnable; // Enable Docking
io.ConfigFlags |= ImGuiConfigFlags_ViewportsEnable; // Enable Multi-Viewport / Platform Windows
// Setup Dear ImGui style
ImGui::StyleColorsDark();
// Setup Platform/Renderer backends
ImGui_ImplGlfw_InitForOpenGL(window, true);
ImGui_ImplOpenGL3_Init(glsl_version);
// Initialize VtkViewer objects
VtkViewer vtkViewer1;
vtkViewer1.addActor(actor);
VtkViewer vtkViewer2;
vtkViewer2.addActor(actor);
vtkViewer2.getRenderer()->SetBackground(0, 0, 0); // Black background
// Our state
bool show_demo_window = true;
bool show_another_window = false;
bool vtk_2_open = true;
ImVec4 clear_color = ImVec4(0.45f, 0.55f, 0.60f, 1.00f);
// Main loop
while (!glfwWindowShouldClose(window))
{
glfwPollEvents();
// Start the Dear ImGui frame (these per-frame calls are required before creating any ImGui windows)
ImGui_ImplOpenGL3_NewFrame();
ImGui_ImplGlfw_NewFrame();
ImGui::NewFrame();
// 4. Show a simple VtkViewer Instance (Always Open)
ImGui::SetNextWindowSize(ImVec2(360, 240), ImGuiCond_FirstUseEver);
ImGui::Begin("Vtk Viewer 1", nullptr, VtkViewer::NoScrollFlags());
vtkViewer1.render(); // default render size = ImGui::GetContentRegionAvail()
ImGui::End();
// 5. Show a more complex VtkViewer Instance (Closable, Widgets in Window)
ImGui::SetNextWindowSize(ImVec2(720, 480), ImGuiCond_FirstUseEver);
if (vtk_2_open) {
ImGui::Begin("Vtk Viewer 2", &vtk_2_open, VtkViewer::NoScrollFlags());
// Other widgets can be placed in the same window as the VTKViewer
// However, since the VTKViewer is rendered to size ImGui::GetContentRegionAvail(),
// it is best to put all widgets first (i.e., render the VTKViewer last).
// If you want the VTKViewer to be at the top of a window, you can manually calculate
// and define its size, accounting for the space taken up by other widgets
auto renderer = vtkViewer2.getRenderer();
if (ImGui::Button("VTK Background: Black")) {
renderer->SetBackground(0, 0, 0);
}
ImGui::SameLine();
if (ImGui::Button("VTK Background: Red")) {
renderer->SetBackground(1, 0, 0);
}
ImGui::SameLine();
if (ImGui::Button("VTK Background: Green")) {
renderer->SetBackground(0, 1, 0);
}
ImGui::SameLine();
if (ImGui::Button("VTK Background: Blue")) {
renderer->SetBackground(0, 0, 1);
}
static float vtk2BkgAlpha = 0.2f;
ImGui::SliderFloat("Background Alpha", &vtk2BkgAlpha, 0.0f, 1.0f);
renderer->SetBackgroundAlpha(vtk2BkgAlpha);
vtkViewer2.render();
ImGui::End();
}
ImGui::Render();
int display_w, display_h;
glfwGetFramebufferSize(window, &display_w, &display_h);
glViewport(0, 0, display_w, display_h);
glClearColor(clear_color.x, clear_color.y, clear_color.z, clear_color.w);
glClear(GL_COLOR_BUFFER_BIT);
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());
// Update and Render additional Platform Windows
if (io.ConfigFlags & ImGuiConfigFlags_ViewportsEnable)
{
GLFWwindow* backup_current_context = glfwGetCurrentContext();
ImGui::UpdatePlatformWindows();
ImGui::RenderPlatformWindowsDefault();
glfwMakeContextCurrent(backup_current_context);
}
glfwSwapBuffers(window);
}
======
I attached the code above.
I want to solve this problem.
Related
I've tried to set the window icon using the scripts from the Stack Overflow question and the documentation below, but nothing worked in my solution:
GLFW SetWindowIcon
https://learn.microsoft.com/en-us/windows/win32/menurc/using-icons
My entire main.cpp from my solution:
#include "imgui/imgui.h"
#include "imgui/imgui_impl_glfw.h"
#include "imgui/imgui_impl_opengl3.h"
#include <string>
#include <iostream>
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <Windows.h>
#include <WinUser.h>
#include <WinNls32.h>
int main()
{
// Create a custom icon at run time.
// Initialize GLFW
glfwInit();
// Tell GLFW what version of OpenGL we are using
// In this case we are using OpenGL 3.3
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
// Tell GLFW we are using the CORE profile
// So that means we only have the modern functions
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
// Create a GLFWwindow object of 800 by 800 pixels, naming it "YoutubeOpenGL"
GLFWwindow* window = glfwCreateWindow(1200, 700, "Dessor 0.1.0c", NULL, NULL);
// Error check if the window fails to create
if (window == NULL)
{
std::cout << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return -1;
}
// Introduce the window into the current context
glfwMakeContextCurrent(window);
//Load GLAD so it configures OpenGL
gladLoadGL();
// Specify the viewport of OpenGL in the Window
// In this case the viewport goes from x = 0, y = 0, to x = 800, y = 800
glViewport(0, 0, 1200, 700);
// Initialize ImGUI
IMGUI_CHECKVERSION();
ImGui::CreateContext();
ImGuiIO& io = ImGui::GetIO(); (void)io;
ImGui::StyleColorsDark();
ImGui_ImplGlfw_InitForOpenGL(window, true);
ImGui_ImplOpenGL3_Init("#version 330");
// Variables to be changed in the ImGUI window
bool drawCube = false;
float size = 1.0f;
float color[4] = { 1.0f, 1.0f, 1.0f, 1.0f };
// Main while loop
while (!glfwWindowShouldClose(window))
{
// Specify the color of the background
glClearColor(0.11f, 0.11f, 0.11f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
// Clean the back buffer and assign the new color to it
// Tell OpenGL a new frame is about to begin
ImGui_ImplOpenGL3_NewFrame();
ImGui_ImplGlfw_NewFrame();
ImGui::NewFrame();
if (drawCube)
glClearColor(0.01f, 0.01f, 0.01f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
// ImGUI window creation
// Particular widget styling
static int i2 = 3;
static int ifov = 60;
static float size = 3.0;
static char name[128] = "5";
static ImVec4 edge_color = ImVec4(0.25f, 0.25f, 0.90f, 1.00f); // not const: the pickers below write to these
static ImVec4 inside_color = ImVec4(0.55f, 0.55f, 0.90f, 1.00f);
const ImVec2 size2 = ImVec2(250, 200);
ImGui::PushStyleColor(ImGuiCol_Text, IM_COL32(255, 0, 0, 255));
ImGui::Begin("Drawing HyperCube Options", NULL, ImGuiWindowFlags_AlwaysAutoResize);
ImGui::PopStyleColor();
ImGui::Checkbox("Draw Shape", &drawCube);
ImGui::SliderInt("D", &i2, 0, atoi(name), "%d-dimensional hypercube");
ImGui::InputText("", name, 7, ImGuiInputTextFlags_CharsDecimal | ImGuiInputTextFlags_EnterReturnsTrue);
ImGui::SameLine();
ImGui::TextColored(ImVec4(1.0f, 0.0f, 1.0f, 1.0f), "%s", name); // colors are 0..1 floats; never pass user text as a format string
ImGui::NewLine();
ImGui::SliderFloat("SIZE", &size, 1.0, 10.0);
ImGui::SliderInt("FOV", &ifov, 30, 120, "%d");
ImGui::NewLine();
ImGui::Button("Render", ImVec2(250, 60));
ImGui::BeginChild("Inside color", size2);
ImGui::TextColored(inside_color, "INSIDE COLOR");
ImGui::ColorPicker3("##inside", &inside_color.x);
ImGui::TextColored(edge_color, "EDGE COLOR");
ImGui::ColorEdit3("##edge", &edge_color.x);
ImGui::EndChild();
ImGui::NewLine();
ImGui::End();
// Checkbox that appears in the window
// Renders the ImGUI elements
ImGui::Render();
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());
// Swap the back buffer with the front buffer
glfwSwapBuffers(window);
// Take care of all GLFW events
glfwPollEvents();
}
// Deletes all ImGUI instances
ImGui_ImplOpenGL3_Shutdown();
ImGui_ImplGlfw_Shutdown();
ImGui::DestroyContext();
// Delete window before ending the program
glfwDestroyWindow(window);
// Terminate GLFW before ending the program
glfwTerminate();
return 0;
}
How can I use glfwSetWindowIcon(), or is there another way to change the window icon in native C++?
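For reference, glfwSetWindowIcon() just takes one or more GLFWimage structs filled with 32-bit RGBA pixel data, and it must be called after glfwCreateWindow(). A minimal sketch (the 16x16 checkerboard is a placeholder for pixels you would normally decode from an image file, e.g. with stb_image; GLFW copies the data before returning, so the buffer may go out of scope afterwards):
// Build a 16x16 RGBA checkerboard as stand-in icon pixels.
unsigned char pixels[16 * 16 * 4];
for (int y = 0; y < 16; ++y) {
    for (int x = 0; x < 16; ++x) {
        unsigned char c = ((x / 4 + y / 4) % 2) ? 255 : 40;
        unsigned char* p = &pixels[(y * 16 + x) * 4];
        p[0] = p[1] = p[2] = c; // R, G, B
        p[3] = 255;             // A (opaque)
    }
}
GLFWimage icon;
icon.width = 16;
icon.height = 16;
icon.pixels = pixels;
glfwSetWindowIcon(window, 1, &icon); // call after glfwCreateWindow()
Note that this changes the window/taskbar icon at runtime; the icon of the .exe file itself on Windows still comes from an .ico resource embedded at build time, which is what the Microsoft link above describes.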
I'm trying to use all three libraries (SDL2, GLEW/OpenGL, and Dear ImGui), but I'm quite confused by the sample code and I can't quite follow the documentation. This is the code, and I'll explain my confusion below:
#include <iostream>
#include <string>
#include <SDL2/SDL.h>
#include <GL/glew.h>
#include <imgui/imgui.h>
#include <imgui/imgui_stdlib.h>
#include <imgui/imgui_impl_sdl.h>
#include <imgui/imgui_impl_opengl3.h>
// Main code
int main(int argc, char* argv[])
{
if (SDL_Init(SDL_INIT_VIDEO) != 0)
{
std::cout << SDL_GetError() << std::endl;
return -1;
}
// GL 3.0 + GLSL 130
const char* glsl_version = "#version 130";
SDL_GL_SetAttribute(SDL_GL_CONTEXT_FLAGS, 0);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 0);
// Create window with graphics context
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);
SDL_GL_SetAttribute(SDL_GL_STENCIL_SIZE, 8);
SDL_WindowFlags window_flags = (SDL_WindowFlags)(SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE | SDL_WINDOW_ALLOW_HIGHDPI);
SDL_Window* window = SDL_CreateWindow("Dear ImGui SDL2+OpenGL3 example", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 1280, 720, window_flags);
SDL_GLContext gl_context = SDL_GL_CreateContext(window);
SDL_GL_MakeCurrent(window, gl_context);
SDL_GL_SetSwapInterval(0); // Disable vsync
if (glewInit() != GLEW_OK) {
std::cout << "Error initializing glew\n";
}
IMGUI_CHECKVERSION();
ImGui::CreateContext();
ImGuiIO& io = ImGui::GetIO();
io.ConfigFlags |= ImGuiConfigFlags_NavEnableKeyboard; // Enable Keyboard Controls
//io.ConfigFlags |= ImGuiConfigFlags_NavEnableGamepad; // Enable Gamepad Controls
// Setup Dear ImGui style
ImGui::StyleColorsDark();
//ImGui::StyleColorsClassic();
// Setup Platform/Renderer backends
ImGui_ImplSDL2_InitForOpenGL(window, gl_context);
ImGui_ImplOpenGL3_Init(glsl_version);
ImFont* font = io.Fonts->AddFontFromFileTTF("C:\\Windows\\Fonts\\Arial.ttf", 30.0f);
ImVec4 clear_color = ImVec4(0.45f, 0.55f, 0.60f, 1.00f);
// Main loop
bool running = false;
SDL_Event event;
while (!running)
{
while (SDL_PollEvent(&event))
{
ImGui_ImplSDL2_ProcessEvent(&event);
if (event.type == SDL_QUIT)
running = true;
if (event.type == SDL_WINDOWEVENT && event.window.event == SDL_WINDOWEVENT_CLOSE && event.window.windowID == SDL_GetWindowID(window))
running = true;
}
// Start the Dear ImGui frame
ImGui_ImplOpenGL3_NewFrame();
ImGui_ImplSDL2_NewFrame(window);
ImGui::NewFrame();
{
static std::string buf = "";
ImGui::PushFont(font);
ImGui::Begin("Window");
ImGui::InputText("Hello", &buf);
//std::cout << io.Fonts->Fonts.size() << std::endl;
ImGui::Text("Application average %.3f ms/frame (%.1f FPS)", 1000.0f / ImGui::GetIO().Framerate, ImGui::GetIO().Framerate);
ImGui::End();
ImGui::PopFont();
}
// Rendering
glViewport(0, 0, (int)io.DisplaySize.x, (int)io.DisplaySize.y);
glClearColor(clear_color.x, clear_color.y, clear_color.z, clear_color.w);
glClear(GL_COLOR_BUFFER_BIT);
ImGui::Render();
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());
SDL_GL_SwapWindow(window);
}
// Cleanup
ImGui_ImplOpenGL3_Shutdown();
ImGui_ImplSDL2_Shutdown();
ImGui::DestroyContext();
SDL_GL_DeleteContext(gl_context);
SDL_DestroyWindow(window);
SDL_Quit();
return 0;
}
There are a few things confusing me here, starting with there being no SDL_Renderer anywhere within the code. I notice that the display draw color is handled by OpenGL, but the rendering is done via glClear(GL_COLOR_BUFFER_BIT); (I THINK). I'm unsure, though, how I could then actually call any SDL2 functions such as SDL_RenderFillRect() with no SDL_Renderer? My best hint is this line:
ImGui::Render();
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());
SDL_GL_SwapWindow(window);
where it's SDL_GL_SwapWindow(), but I believe this also just presents what OpenGL rendered? I'm not really sure which of all the rendering lines actually does what. I would have thought ImGui::Render() renders all the ImGui things, but then there's ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());, and I'm not sure how SDL_GL_SwapWindow() ties in since I've already called glClear(). Additionally, why is there a function called ImGui::EndFrame() that is not called at the end of a frame in the sample code, while ImGui::NewFrame() is called each loop, and the same for ImGui_ImplOpenGL3_NewFrame(); and ImGui_ImplSDL2_NewFrame(window);? Can someone please explain some of these things? It's very confusing.
SDL_Renderer is something you need if you want to use the SDL API for drawing tasks, but it is not required if you just create the OpenGL context with SDL and do all the drawing directly with OpenGL.
but the rendering is called via glClear(GL_COLOR_BUFFER_BIT);
No, glClear clears part of the current render buffer; in this case, the color buffer (see: What is the purpose of GL_COLOR_BUFFER_BIT and GL_DEPTH_BUFFER_BIT?).
SDL_GL_SwapWindow(window); brings the contents of the current back buffer (the finished rendering) to the window (see SDL_GL_SwapWindow).
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData()); invokes the drawing of the ImGui components.
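Putting it together, the per-frame call order in the question's own code does the following (annotations only; no new calls are needed):
ImGui_ImplOpenGL3_NewFrame();    // backend: prepare GL resources (e.g. the font texture) for a new frame
ImGui_ImplSDL2_NewFrame(window); // backend: feed input and display size from SDL into ImGui
ImGui::NewFrame();               // core: start recording this frame's UI
// ... ImGui::Begin()/widgets/ImGui::End() only record commands, they draw nothing ...
glClear(GL_COLOR_BUFFER_BIT);                            // erase last frame's pixels in the back buffer
ImGui::Render();                                         // core: finalize the draw lists (CPU side, still draws nothing)
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());  // backend: issue the actual GL draw calls
SDL_GL_SwapWindow(window);                               // present the back buffer on screen
ImGui::EndFrame() does not need to be called explicitly because ImGui::Render() calls it internally.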
I want to use OpenGL 3.1.
I'm using a MacBook Pro with 2 graphics cards: NVIDIA GeForce GT 650M 1024 MB, and Intel HD Graphics 4000 1536 MB. They both support up to OpenGL 4.1.
Previously I was able to draw a triangle; however, my program was using version 2.1. Therefore I added SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);. However, now the triangle is no longer drawn.
#include <string>
#include <iostream>
#include <SDL2/SDL.h>
#define GL3_PROTOTYPES 1
#include "../include/GL3/gl3.h"
int main(int argc, const char *argv[]) {
// Initialize the SDL
if(SDL_Init(SDL_INIT_VIDEO) < 0) {
std::cout << "Failed to initialize the SDL: " << SDL_GetError() << std::endl;
SDL_Quit();
return -1;
}
// Configure the SDL to use OpenGL 3.1
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1);
// ======= HERE =======
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
// ====================
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);
SDL_Window* window = SDL_CreateWindow("Triangle Test", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 800, 600, SDL_WINDOW_SHOWN | SDL_WINDOW_OPENGL);
if (window == 0) {
std::cout << "Error when creating the window: " << SDL_GetError() << std::endl;
SDL_Quit();
return -1;
}
// Create the OpenGL context
SDL_GLContext contextOpenGL = SDL_GL_CreateContext(window);
// Initialization may fail
if (contextOpenGL == 0) {
std::cout << SDL_GetError() << std::endl;
SDL_DestroyWindow(window);
SDL_Quit();
return -1;
}
SDL_Event events;
bool end = false;
// Define the vertices of our triangle
static const GLfloat vertices[] = {0.0, 1.0, // top point
-0.5, 0.0, // lower-left point
0.5, 0.0}; // lower-right point
const int TRIANGLE_IDX = 0;
while(!end) {
SDL_WaitEvent(&events);
if(events.window.event == SDL_WINDOWEVENT_CLOSE) {
end = true;
}
// Clear the screen
glClear(GL_COLOR_BUFFER_BIT);
// Send vertices to OpenGL
glVertexAttribPointer(TRIANGLE_IDX, 2, GL_FLOAT, GL_FALSE, 0, vertices);
// Activate our vertex array
glEnableVertexAttribArray(TRIANGLE_IDX);
// Draw the points passed previously
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(TRIANGLE_IDX);
// Refresh the screen
SDL_GL_SwapWindow(window);
}
return 0;
}
I tried to first use glGenBuffers, glBindBuffer and glBufferData, but I could not manage to make it work.
The Fixed Function Pipeline has been removed from core OpenGL 3.1 and above.
You will have to use shaders instead. This site has a nice example of how to use them.
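As a sketch of what that means for this triangle (assuming a 3.1 core context, hence GLSL #version 140; compile/link error checking omitted, and reusing vertices and TRIANGLE_IDX from the question's code), the one-time setup would look roughly like this:
// Shaders replace the fixed-function vertex/fragment processing:
const char* vs_src =
    "#version 140\n"
    "in vec2 position;\n"
    "void main() { gl_Position = vec4(position, 0.0, 1.0); }";
const char* fs_src =
    "#version 140\n"
    "out vec4 fragColor;\n"
    "void main() { fragColor = vec4(1.0, 1.0, 1.0, 1.0); }";
GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, &vs_src, 0);
glCompileShader(vs); // check GL_COMPILE_STATUS in real code
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, &fs_src, 0);
glCompileShader(fs);
GLuint program = glCreateProgram();
glAttachShader(program, vs);
glAttachShader(program, fs);
glBindAttribLocation(program, TRIANGLE_IDX, "position"); // bind before linking
glLinkProgram(program);
glUseProgram(program);
// Core profile also forbids client-side vertex arrays, so the data must
// live in a VBO, described through a VAO:
GLuint vao, vbo;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(TRIANGLE_IDX, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(TRIANGLE_IDX);
The per-frame loop then keeps just glClear and glDrawArrays(GL_TRIANGLES, 0, 3). Also note that macOS only provides core profile contexts of version 3.2 and up, so requesting 3.1 core there may not give you exactly the version you asked for.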
I have a problem with OpenCV and OpenGL. I need to display images captured from a webcam with OpenCV using OpenGL, and show this in a Razer OSVR headset. But with my current code the framerate is around 1 or 2 fps, and I don't know what I am doing wrong. Here is my code; I think the problem is in the draw_cube() function.
Main.cpp
// Internal Includes
#include <osvr/ClientKit/ClientKit.h>
#include <osvr/ClientKit/Display.h>
#include "SDL2Helpers.h"
#include "OpenGLCube.h"
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/opencv.hpp>
// Library/third-party includes
#include <SDL.h>
#include <SDL_opengl.h>
// Standard includes
#include <iostream>
static auto const WIDTH = 1920;
static auto const HEIGHT = 1080;
// Forward declarations of rendering functions defined below.
void render(osvr::clientkit::DisplayConfig &disp);
int main(int argc, char *argv[]) {
namespace SDL = osvr::SDL2;
// Open SDL
SDL::Lib lib;
// Use OpenGL 2.1
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 2);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1);
// Create a window
auto window = SDL::createWindow("OSVR", SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED, WIDTH, HEIGHT,
SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN);
if (!window) {
std::cerr << "Could not create window: " << SDL_GetError() << std::endl;
return -1;
}
// Create an OpenGL context and make it current.
SDL::GLContext glctx(window.get());
// Turn on V-SYNC
SDL_GL_SetSwapInterval(1);
// Start OSVR and get OSVR display config
osvr::clientkit::ClientContext ctx("com.osvr.example.SDLOpenGL");
osvr::clientkit::DisplayConfig display(ctx);
if (!display.valid()) {
std::cerr << "\nCould not get display config (server probably not "
"running or not behaving), exiting."
<< std::endl;
return -1;
}
std::cout << "Waiting for the display to fully start up, including "
"receiving initial pose update..."
<< std::endl;
while (!display.checkStartup()) {
ctx.update();
}
std::cout << "OK, display startup status is good!" << std::endl;
// Event handler
SDL_Event e;
#ifndef __ANDROID__ // Don't want to pop up the on-screen keyboard
SDL::TextInput textinput;
#endif
bool quit = false;
while (!quit) {
// Handle all queued events
while (SDL_PollEvent(&e)) {
switch (e.type) {
case SDL_QUIT:
// Handle some system-wide quit event
quit = true;
break;
case SDL_KEYDOWN:
if (SDL_SCANCODE_ESCAPE == e.key.keysym.scancode) {
// Handle pressing ESC
quit = true;
}
break;
}
if (e.type == SDL_QUIT) {
quit = true;
}
}
// Update OSVR
ctx.update();
// Render
render(display);
// Swap buffers
SDL_GL_SwapWindow(window.get());
}
return 0;
}
void render(osvr::clientkit::DisplayConfig &disp) {
/// For each viewer, eye combination...
disp.forEachEye([](osvr::clientkit::Eye eye) {
/// For each display surface seen by the given eye of the given
/// viewer...
eye.forEachSurface([](osvr::clientkit::Surface surface) {
auto viewport = surface.getRelativeViewport();
glViewport(static_cast<GLint>(viewport.left),
static_cast<GLint>(viewport.bottom),
static_cast<GLsizei>(viewport.width),
static_cast<GLsizei>(viewport.height));
glLoadIdentity();
cv::VideoCapture cap(0); // open the default camera
cv::Mat img;
cap >> img; // get a new frame from camera
cv::flip(img,img,0);
//resize(img, img, Size(160, 140), 0, 0, INTER_CUBIC);
draw_cube(img);
});
});
}
OpenGLCube.h
#ifndef INCLUDED_OpenGLVIDEO_h
#define INCLUDED_OpenGLVIDEO_h // must match the #ifndef above for the include guard to work
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/opencv.hpp>
using namespace cv;
GLuint texture;
void draw_cube(cv::Mat img)
{
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB,img.size().width,img.size().height, 0, GL_BGR,GL_UNSIGNED_BYTE,img.data);
glEnable(GL_TEXTURE_2D);
glBegin(GL_QUADS);
glTexCoord2d(0.0, 1.0);
glVertex2d(-1,1);
glTexCoord2d(0.0, 0.0);
glVertex2d(-1,-1);
glTexCoord2d(1.0, 0.0);
glVertex2d(1,-1);
glTexCoord2d(1.0, 1.0);
glVertex2d(1,1);
glEnd();
glDisable(GL_TEXTURE_2D);
glDeleteTextures(1, &texture);
}
#endif
Thanks.
glGenTextures should be called once, before your main loop, since you don't need to create a brand-new texture every frame. You can simply overwrite the previous data in the same texture object using glTexImage2D, just as you are now. Similarly, as a good convention, glDeleteTextures should be called once, before exiting the program.
For example, the following should be done outside of your main loop:
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
You will, of course, need to declare texture outside of your main loop and draw function and pass that in.
This line:
cv::VideoCapture cap(0); // open the default camera
must not be inside the render loop either. Opening the camera device is expensive, so do it once, before the main loop.
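Putting both answers together, the structure might look like this sketch (using the names from the question's code):
// At startup, once:
cv::VideoCapture cap(0); // open the default camera a single time
GLuint texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
// Per frame (inside the render callback):
cv::Mat img;
cap >> img; // grab the newest webcam frame
cv::flip(img, img, 0);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, img.cols, img.rows, 0,
             GL_BGR, GL_UNSIGNED_BYTE, img.data); // only re-upload the pixels
// ...then draw the textured quad exactly as before, but without
// glGenTextures/glDeleteTextures in the loop.
// At shutdown, once:
glDeleteTextures(1, &texture);
If the frame size never changes, glTexSubImage2D is cheaper still, since it reuses the already-allocated texture storage instead of redefining it every frame.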
Thanks for your replies.
For anyone who has seen my previous questions, after working through the Red Book for Version 2.1, I am now moving on to Version 4.3. (Hooray, you say, since many of you have been telling me to do this for ages.)
So, I am deep into Chapter 3, but still haven't got Chapter 1's example program working.
I have two problems. (Actually 3.) Firstly, it doesn't compile. Okay so that's a problem, but kind of irrelevant considering the next two. Secondly, I don't exactly understand how it works or what it is trying to do, but we will get onto that.
Thirdly, it seems to me that the author of this code is a complete magician. I would suggest all sorts of tinkery-hackery are occurring here. This is most likely to be because of Problem Number 2, the fact that I don't understand what it is trying to do. The guys who wrote this book are, of course, not idiots, but bear with me, I will give an example.
Here is a section of code taken from the top of the main.cpp file. I will include the rest of the file later on, but for now:
enum VAO_IDs {
Triangles,
NumVAOs
};
If I understand correctly, this gives VAO_IDs::Triangles the value of 0 and NumVAOs the value of 1, since enums are zero-based. (I hope I am correct here, or it will be embarrassing for me.)
A short while later, you can see this line:
GLuint VAOs[NumVAOs];
Which declares an array of GLuints, containing 1 GLuint due to the fact that NumVAOs is equal to 1. Now, firstly, shouldn't it be VAO_IDs::NumVAOs?
And secondly, why on earth has an enum been used in this way? I would never use an enum like that for obvious reasons - cannot have more than one data with the same value, values are not explicitly specified etc...
Am I barking up the right tree here? It just doesn't make sense to do this... VAOs should have been a global, like this, surely? GLuint NumVAOs = 1; This is just abusive to the enum!
In fact, the statement const GLuint NumVertices = 6; appears below. This makes sense, doesn't it, because we can change the value 6 if we wanted to, but we cannot change NumVAOs to 0, for example, because Triangles is already set to 0. (Why is it in an enum? Seriously?)
Anyway, forget the enums... For now... Okay, so I made a big deal out of that, and that's the end of the problems... Any further comments I have are in the code now. You can ignore most of the glfw stuff; it's essentially the same as glut.
// ----------------------------------------------------------------------------
//
// Triangles - First OpenGL 4.3 Program
//
// ----------------------------------------------------------------------------
#include <cstdlib>
#include <cstdint>
#include <cmath>
#include <stdio.h>
#include <iostream>
//#include <GL/gl.h>
//#include <GL/glu.h>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
/// OpenGL specific
#include "vgl.h"
#include "LoadShaders.h" // These are essentially empty files with some background work going on, nothing declared or defined which is relevant here
enum VAO_IDs {
Triangles,
NumVAOs
};
// So Triangles = 0, NumVAOs = 1
// WHY DO THIS?!
enum Buffer_IDs {
ArrayBuffer,
NumBuffers
};
enum Attrib_IDs {
vPosition = 0
}
// Please, please, please someone explain the enum thing to me, why are they using them instead of global -just- variables.
// (Yeah an enum is a variable, okay, but you know what I mean.)
GLuint VAOs[NumVAOs]; // Compile error: expected initializer before 'VAOs'
GLuint Buffers[NumBuffers]; // NumBuffers is hidden in an enum again, so it NumVAOs
const GLuint NumVertices = 6; // Why do something different here?
// ----------------------------------------------------------------------------
//
// Init
//
// ----------------------------------------------------------------------------
void init()
{
glGenVertexArrays(NumVAOs, VAOs); // Error: VAOs was not declared in this scope
glBindVertexArray(VAOs[Triangles]);
GLfloat vertices[NumVertices][2] = {
{ -0.90, -0.90 },
{ +0.85, -0.90 },
{ -0.90, +0.85 },
{ +0.90, -0.85 },
{ +0.90, +0.90 },
{ -0.85, +0.90 }
};
glGenBuffers(NumBuffers, Buffers);
glBindBuffer(GL_ARRAY_BUFFER, Buffers[ArrayBuffer]);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
ShaderInfo shaders[] = {
{ GL_VERTEX_SHADER, "triangles.vert" },
{ GL_FRAGMENT_SHADER, "triangles.frag" },
{ GL_NONE, nullptr }
};
GLuint program = LoadShaders(shaders);
glUseProgram(program);
glVertexAttribPointer(vPosition, 2, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0));
glEnableVertexAttribArray(vPosition);
}
// ----------------------------------------------------------------------------
//
// Display
//
// ----------------------------------------------------------------------------
void display()
{
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(VAOs[Triangles]);
glDrawArrays(GL_TRIANGLES, 0, NumVertices); // Error VAOs not declared
glFlush();
}
// ----------------------------------------------------------------------------
//
// Main
//
// ----------------------------------------------------------------------------
void error_handle(int error, const char* description)
{
fputs(description, stderr);
}
void key_handle(GLFWwindow* window, int key, int scancode, int action, int mods)
{
if(key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
glfwSetWindowShouldClose(window, GL_TRUE);
}
void handle_exit()
{
}
int main(int argc, char **argv)
{
// Setup exit function
atexit(handle_exit);
// GLFW Window Pointer
GLFWwindow* window;
// Setup error callback
glfwSetErrorCallback(error_handle);
// Init
if(!glfwInit())
{
exit(EXIT_FAILURE);
}
// Setup OpenGL
glClearColor(0.0, 0.0, 0.0, 0.0);
glEnable(GL_DEPTH_TEST);
// Set GLFW window hints
glfwWindowHint(GLFW_DEPTH_BITS, 32);
glfwWindowHint(GLFW_RED_BITS, 8);
glfwWindowHint(GLFW_GREEN_BITS, 8);
glfwWindowHint(GLFW_BLUE_BITS, 8);
glfwWindowHint(GLFW_ALPHA_BITS, 8);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
//glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, 1);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
// Init GLEW
if(glewInit())
{
printf("GLEW init failure!\n", stderr);
exit(EXIT_FAILURE);
}
// Init OpenGL
init();
// Create Window
window = glfwCreateWindow(800, 600, "Window Title", nullptr, nullptr);
if(!window)
{
glfwTerminate();
return EXIT_FAILURE;
}
// Make current
glfwMakeContextCurrent(window);
// Set key callback
glfwSetKeyCallback(window, key_handle);
// Check OpenGL Version
char* version;
version = (char*)glGetString(GL_VERSION);
printf("OpenGL Application Running, Version: %s\n", version);
// Enter main loop
while(!glfwWindowShouldClose(window))
{
// Event polling
glfwPollEvents();
// OpenGL Rendering
// Setup OpenGL viewport and clear screen
float ratio;
int width, height;
glfwGetFramebufferSize(window, &width, &height);
ratio = (float)width / (float)height; // cast to avoid integer division
glViewport(0, 0, width, height);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Setup projection
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(45.0, ratio, 0.1, 10.0);
// Render
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
// Swap Buffers
glfwSwapBuffers(window);
}
// Free glfw memory allocated for window
glfwDestroyWindow(window);
// Exit
glfwTerminate();
exit(EXIT_SUCCESS);
}
A very verbose question, I realize, but I thought it was important to explain why I think it's crazy code rather than just saying "I don't get it", as it would be easy to. Could someone please explain why these very clever people decided to do it this way, and why there are errors? (I can find nothing about this online.)
The author is using the automatic numbering property of enums to automatically keep the NumVAOs and NumBuffers values up to date. For example, when new VAO IDs are added to the enum, the NumVAOs value will still be correct as long as it is listed last in the enum.
enum VAO_IDs {
Triangles,
Polygons,
Circles,
NumVAOs
};
As for the compile error: it is not this trick, which every C++ compiler supports. The real culprit is the missing semicolon after the enum Attrib_IDs { vPosition = 0 } definition in your code; without it, the following GLuint VAOs[NumVAOs]; declaration fails to parse, producing exactly the "expected initializer before 'VAOs'" error you quoted.
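For completeness, the fixed declarations (only the semicolon is new):
enum Attrib_IDs {
    vPosition = 0
}; // <- this semicolon was missing
GLuint VAOs[NumVAOs];       // now parses: NumVAOs == 1
GLuint Buffers[NumBuffers]; // NumBuffers == 1, same trick
In modern C++ you could equally write constexpr GLuint NumVAOs = 1; as a plain constant, but the enum idiom keeps the count in sync automatically as new IDs are added above it.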