When I use GL3W, I cannot load textures. They appear blank or messed up.
I wanted an OpenGL 4.2 context in SDL 1.3, so I decided to use GL3W, as GLEW used deprecated functions. Everything seems to work fine; however, when I try to load a texture, it either gets messed up (randomly colored lines) or simply ends up blank (black without alpha, transparent with). Everything else I've tried so far has worked (shaders, VAOs, VBOs, etc.).
I wrote the most simple example I could come up with to illustrate:
#include <SDL.h>
#include <SDL_image.h>
#include <GL3/gl3w.h>
#include <gl/gl.h>
#include <gl/glu.h>
int main(int argc, char* argv[]) {
    SDL_Init(SDL_INIT_EVERYTHING);
    SDL_WindowID mainWindow = SDL_CreateWindow("Test", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 800, 600, SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 4);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
    SDL_GLContext mainContext = SDL_GL_CreateContext(mainWindow);
    SDL_GL_MakeCurrent(mainWindow, mainContext);
    gl3wInit();
    GLuint id;
    glGenTextures(1, &id);
    glBindTexture(GL_TEXTURE_2D, id);
    SDL_Surface* test2 = IMG_Load("test.png");
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, test2->w, test2->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, test2->pixels);
    // Loop to keep the window open while debugging in gDEBugger
    while (1) {
        SDL_GL_SwapWindow(mainWindow);
    }
    return 0;
}
I don't know how relevant it is, but since gl3w is generated by a Python script I'll include it (external links because of length):
gl3w.c: http://pastebin.com/T5GNdJL8
gl3w.h: http://pastebin.com/yU2EzBZP
gl3.h: http://pastebin.com/0uCgB8d1
If I remove #include <GL3/gl3w.h> and gl3wInit(); the texture is successfully loaded.
I wanted to do some 2D on top of 3D so I could do a decent GUI. So I created the textures and so on.
The code compiles with no errors, but when I run the program everything goes fine until I call this:
glGenFramebuffers(1, &fb);
Then this appears:
error 139 segmentation fault (core dumped).
Does someone know what's wrong with the code?
std::cout << "test1" << std::endl;
unsigned int fb;
glGenFramebuffers(1, &fb);
std::cout << "test2" << std::endl;
glBindRenderbuffer(GL_RENDERBUFFER, fb);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, renderTexture.getId(), 0);
The "test1" text is displayed, but the "test2" is not.
CODE:
game.cpp
#include "game.h"
game::game(){
    SDL_Init(SDL_INIT_EVERYTHING);
    SDL_Surface* screen = SDL_SetVideoMode(1000, 600, 32, SDL_SWSURFACE|SDL_OPENGL);
    glClearColor(0.5, 0.5, 0.5, 1.0);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluPerspective(45, 1000.0/600.0, 1.0, 500.0);
    glMatrixMode(GL_MODELVIEW);
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_TEXTURE_2D);
}
game::~game(){
    SDL_Quit();
}
void game::start(){
    Uint32 start;
    SDL_Event event;
    texture renderTexture = texture();
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, 1000, 600, 0, GL_BGRA, GL_UNSIGNED_BYTE, 0);
    std::cerr << "test1" << std::endl;
    unsigned int fb;
    glGenFramebuffers(1, &fb);
    std::cerr << "test2" << std::endl;
    glBindRenderbuffer(GL_RENDERBUFFER, fb);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, renderTexture.getId(), 0);
    bool running = true;
    while (running){
        start = SDL_GetTicks();
        while (SDL_PollEvent(&event)){
            switch (event.type){
                case SDL_QUIT:
                    running = false;
                    break;
            }
        }
        update();
        show(fb);
        showMenu();
        SDL_GL_SwapBuffers();
        if (1000/30 > (SDL_GetTicks()-start)){
            SDL_Delay(1000/30 - (SDL_GetTicks()-start));
        }
    }
}
void game::update(){
}
void game::show(unsigned int fb){
    glBindFramebuffer(GL_FRAMEBUFFER, fb);
    glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
    glLoadIdentity();
}
void game::showMenu(){
    bindWindowAsRenderTarget();
    glViewport(0, 0, 1000, 600);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0.0, 1000.0, 0.0, 600.0, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
}
void game::bindWindowAsRenderTarget(){
    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
    glViewport(0, 0, 1000, 600);
}
game.h
#ifndef GAME_H_INCLUDED
#define GAME_H_INCLUDED
#include <iostream>
#include <SDL/SDL.h>
#include <GL/glew.h>
#include <GL/gl.h>
#include <GL/glu.h>
#include "texture.h"
class game{
    void update();
    void show(unsigned int fb);
    void showMenu();
    void bindWindowAsRenderTarget();
public:
    game();
    ~game();
    void start();
};
#endif // GAME_H_INCLUDED
main.cpp
#include "game.h"
int main(int argc, char** argv){
    game g;
    g.start();
    return 0;
}
texture.h
#ifndef TEXTURE_H_INCLUDED
#define TEXTURE_H_INCLUDED
#include <SDL/SDL.h>
#include <GL/glew.h>
#include <GL/gl.h>
#include <GL/glu.h>
class texture{
    unsigned int id;
public:
    texture();
    ~texture();
    void loadImage(const char* filename);
    unsigned int getId();
};
#endif // TEXTURE_H_INCLUDED
texture.cpp
#include "texture.h"
texture::texture(){
    glGenTextures(1, &id);
    glBindTexture(GL_TEXTURE_2D, id);
}
texture::~texture(){
    glDeleteTextures(1, &id);
}
void texture::loadImage(const char* filename){
    SDL_Surface* img = SDL_LoadBMP(filename);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, img->w, img->h, 0, GL_RGB, GL_UNSIGNED_SHORT_5_6_5, img->pixels);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    SDL_FreeSurface(img);
}
unsigned int texture::getId(){
    return id;
}
Dollars to donuts you're #include-ing a GL extension loader (GLEW, GLAD, etc.) and then:
1. Trying to init it before you have a current GL context (leaving function pointers like glGenFramebuffers() NULL), or
2. Forgetting to init it entirely, or
3. (Unlikely, unless you're doing something silly like trying to use OpenGL via remote desktop) correctly initing your extension loader but using a GL implementation that doesn't support core FBOs
EDIT Oh hey it's #2.
You need to call glewInit() after SDL_SetVideoMode(). You should also use the GLEW version-checking macros to verify that the underlying GL implementation supports core FBOs (they went core in OpenGL 3.0).
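A minimal sketch of what that fix might look like in game::game() (game.h already includes GL/glew.h; GLEW_VERSION_3_0 and GLEW_ARB_framebuffer_object are standard GLEW macros, and the error handling here is only illustrative):
game::game(){
    SDL_Init(SDL_INIT_EVERYTHING);
    SDL_Surface* screen = SDL_SetVideoMode(1000, 600, 32, SDL_SWSURFACE|SDL_OPENGL);
    // glewInit() needs a current GL context, so it must come after SDL_SetVideoMode()
    GLenum err = glewInit();
    if (err != GLEW_OK){
        std::cerr << "glewInit failed: " << glewGetErrorString(err) << std::endl;
    }
    // Without one of these, glGenFramebuffers can still be a NULL pointer
    if (!GLEW_VERSION_3_0 && !GLEW_ARB_framebuffer_object){
        std::cerr << "Core framebuffer objects are not supported" << std::endl;
    }
    // ... rest of the original constructor (projection setup, glEnable calls) ...
}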
Ok, before anyone marks this question as a duplicate, I have looked at What is an undefined reference/unresolved external symbol error and how do I fix it? and many other online posts. I've tried every solution I've come across but I still can't fix these errors:
1>SOIL.lib(stb_image_aug.o) : error LNK2019: unresolved external symbol __alloca referenced in function _stbi_zlib_decode_noheader_buffer
1>SOIL.lib(image_helper.o) : error LNK2019: unresolved external symbol _sqrtf referenced in function _RGBE_to_RGBdivA2
I'm using Visual Studio 2012 on Windows 8. I've tried rebuilding the library and I have quintuple-checked all my includes and directories. Here are the SOIL includes/directories I have:
>Configuration Properties->VC++ Directories->Include Directories: C:\SOIL\Simple OpenGL Image Library\src
>Configuration Properties->VC++ Directories->Library Directories: C:\SOIL\Simple OpenGL Image Library\lib
>Configuration Properties->Linker->Input->Additional Dependencies: SOIL.lib
And here's my code. The SOIL_load_image function is what's causing the errors:
#include <Windows.h>
#include <GL/glut.h>
#include "glext.h"
#include <SOIL.h>
void glEnable2D( void );
void display();
void glDisable2D( void );
GLuint tex;
/* Main function: GLUT runs as a console application starting at main() */
int main(int argc, char** argv)
{
    glutInit(&argc, argv); // Initialize GLUT
    glutCreateWindow("OpenGL Setup Test"); // Create a window with the given title
    glutInitWindowSize(320, 320); // Set the window's initial width & height
    glutInitWindowPosition(50, 50); // Position the window's initial top-left corner
    //Texture
    glGenTextures(1, &tex);
    glBindTexture(GL_TEXTURE_2D, tex);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER);
    float color[] = { 1.0f, 0.0f, 0.0f, 1.0f };
    glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, color);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    int width, height;
    unsigned char* image = SOIL_load_image("Resources/Sprites/playerCharacter.png", &width, &height, 0, SOIL_LOAD_RGB);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, image);
    SOIL_free_image_data(image);
    //Texture
    glutDisplayFunc(display); // Register display callback handler for window re-paint
    glutMainLoop(); // Enter the infinitely event-processing loop
    return 0;
}
I've been working on this all day and I haven't been able to figure out what I'm doing wrong, and it's been extremely frustrating. Note: I know the original name of SOIL.lib is "libSOIL.a." I tried working with it with that name and got the same errors.
You can build the VC8 or VC9 solution files yourself in VS2012/13. Just make sure you use the Win32 configuration if you are building a 32-bit application, and change the configuration to x64 if you are building a 64-bit application. Once the projects are built, link your OpenGL project against the generated SOIL.lib files and you will be good to go.
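If the property pages get unwieldy, one hedged alternative (standard MSVC behavior, nothing SOIL-specific) is to pull the library in from source once the rebuilt SOIL.lib is on the library path:
#include <SOIL.h>
// MSVC-only: equivalent to listing these under Linker->Input->Additional Dependencies
#pragma comment(lib, "SOIL.lib")
#pragma comment(lib, "opengl32.lib")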
I have been having a very odd problem when trying to use OpenGL's C++ API. I am trying to load in a texture using ImageMagick, and then display it as a simple 2D textured square. I have a decent amount of experience with using OpenGL in Java, so I understand how to render a texture and bind it to a primitive. However, each time I attempt to draw it, the program either fails to render, or it renders it as a (properly sized) white square. I'm not entirely sure what is going on, but I believe it has to do with ImageMagick.
I have been using Ubuntu's terminal for compiling, and I've learned just how painful it can be to have to install libraries manually. ImageMagick first refused to compile when used in my program, and when I finally got the program to compile, it would seg-fault each time it ran. I've finally got it "working", but now, whenever I attempt to load in the image, the program will run without rendering. I haven't found anything like this on Google.
http://imgur.com/C7yKwDK
The odd thing is, very rarely, it will work correctly and render the square as expected. However, when I then try to rerun the program, it fails as shown above. I've determined that the line that causes it to fail to render is the same line where the image is loaded, so that led me to believe that the image was just being loaded incorrectly, causing the program to fail. However, if I move the texture loading code before the creation of the GL window, the program will consistently render successfully, but the textured square appears only as white (though the size of the square is correct, so I know the image loading is working).
Anyway, sorry for the long post. I've just given up solving this one on my own, and was hoping one of you could help me out.
OpenGL Initialization Code:
Texture* tx;
void GraphicsOGL :: initialize3D(int argc, char* argv[]) {
    Magick::InitializeMagick(*argv);
    glutInit(&argc, argv);
    //Loading Here ALWAYS Causes White Square
    /*glEnable(GL_TEXTURE_2D);
    tx = new Texture("Resources/Images/test.png");
    tx->load();*/
    glutInitDisplayMode(GLUT_DOUBLE|GLUT_RGBA);
    glutInitWindowSize(SCREEN_WIDTH, SCREEN_HEIGHT);
    glutInitWindowPosition(100, 100);
    glutCreateWindow("OpenGL Game");
    glViewport(0,0,SCREEN_WIDTH,SCREEN_HEIGHT);
    glOrtho(0,SCREEN_WIDTH,SCREEN_HEIGHT,0, -3,1000);
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_ALPHA_TEST);
    glEnable(GL_TEXTURE_2D);
    //Loading Here SOMETIMES Works, But Typically Fails
    tx = new Texture("Resources/Images/test.png");
    tx->load();
    glutDisplayFunc(displayCallback);
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glutMainLoop();
}
Texture Loading Code:
bool Texture::load() {
    try {
        m_image.read(m_fileName); //This Line Causes it to Fail to Render
        m_image.write(&m_blob, "RGBA");
    }
    catch (Magick::Error& Error) {
        std::cout << "Error loading texture '" << m_fileName << "': " << Error.what() << std::endl;
        return false;
    }
    width = m_image.columns();
    height = m_image.rows();
    glGenTextures(1, &m_textureObj);
    glBindTexture(m_textureTarget, m_textureObj);
    //glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0);
    //glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, 0);
    glTexParameterf(m_textureTarget, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameterf(m_textureTarget, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP, GL_TRUE);
    glTexImage2D(m_textureTarget, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, m_blob.data());
    //glBindTexture(m_textureTarget, 0);
    return true;
}
Texture Drawing Code:
void GraphicsOGL :: drawTexture(float x, float y, Texture* tex) {
    glEnable(GL_TEXTURE_2D);
    tex->bind();
    float depth = 0, w, h;
    w = tex->getWidth();
    h = tex->getHeight();
    glBegin(GL_QUADS);
        glVertex3f(x, y+h, depth); glTexCoord2f(1,0);
        glVertex3f(x+w, y+h, depth); glTexCoord2f(1,1);
        glVertex3f(x+w, y, depth); glTexCoord2f(0,1);
        glVertex3f(x, y, depth); glTexCoord2f(0,0);
    glEnd();
}
I have a problem loading a texture using SDL library.
Usually I make programs on Linux, but I try to write code that is also compatible with Visual Studio.
On Linux everything is OK, but on Visual Studio the build fails on "GL_UNSIGNED_SHORT_5_6_5" in the glTexImage2D(...) call.
Below is a general idea of what I want to do, inspired by this tutorial:
#include "stdafx.h"
#include <stdlib.h>
#include <stdio.h>
#include <GL/glut.h>
//#include <GL/glext.h>
#include "SDL.h"
int brick;
float c=0.5;
float rx_min=0, ry_min=0;
float rx_max=1, ry_max=1;
unsigned int LoadTexture(const char* filename);
void DrawTexture(int object);
void setupmywindow();
void myDrawing();
void setupmywindow()
{
    glClearColor(1.0,1.0,1.0,0);
    glColor3f(0.0, 0.0, 0.0);
    glPolygonMode(GL_FRONT_AND_BACK,GL_FILL);
    gluOrtho2D(rx_min,ry_min, rx_max, ry_max);
    brick = LoadTexture("brick.bmp");
}
void DrawTexture(int object)
{
    glBindTexture(GL_TEXTURE_2D, object);
    glColor3f(c,c,c);
    glBegin(GL_QUADS);
        glTexCoord2f(0., 1. );
        glVertex2f( rx_min , ry_min );
        glTexCoord2f(0., 0. );
        glVertex2f( rx_min, ry_max );
        glTexCoord2f(1., 0. );
        glVertex2f( rx_max , ry_max );
        glTexCoord2f(1., 1. );
        glVertex2f( rx_max , ry_min );
    glEnd();
}
unsigned int LoadTexture(const char* filename)
{
    SDL_Surface* img=SDL_LoadBMP(filename);
    unsigned int id;
    glGenTextures(1, &id);
    glBindTexture(GL_TEXTURE_2D,id);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, img->w, img->h, 0, GL_RGB, GL_UNSIGNED_SHORT_5_6_5, img->pixels);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    SDL_FreeSurface(img);
    return id;
}
void myDrawing()
{
    glClear(GL_COLOR_BUFFER_BIT);
    DrawTexture(brick);
    glFlush();
}
int main(int argc, char **argv)
{
    printf("AUTH Computational Physics - Computer Graphics\n");
    printf("Project >>TestTexture.cpp\n");
    printf("--------------------------------------------------------\n");
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_SINGLE|GLUT_RGB);
    glutInitWindowPosition(50,50);
    glutCreateWindow("Texture Test");
    setupmywindow();
    glutDisplayFunc(myDrawing);
    glutMainLoop();
    return 0;
}
The error is:
error C2065: 'GL_UNSIGNED_SHORT_5_6_5' : undeclared identifier
Here is the image that I try to load and it is configured as a bitmap (8bit 5 6 5) with GIMP 2.8
NOTE: When I uncomment #include <GL/glext.h>, which is not needed on Linux, I get the following message:
Unhandled exception at 0x00d1193f in testTesxture.exe: 0xC0000005: Access violation reading location 0x00000014.
Generally, if I save a bitmap image (for example with Paint), how can I tell which type I have to pass (GL_UNSIGNED_SHORT_5_6_5, GL_UNSIGNED_BYTE, etc.)?
The problem is likely that the OpenGL headers shipped with Windows are much older than those on Linux, and that old header does not declare this specific identifier (and others, I'm sure). To get around this and any other possible version problems, I would use GLEW, which does the hard work for you.
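A rough sketch of what that looks like in this program (assuming GLEW is installed and linked; the rest of the code is unchanged):
#include <GL/glew.h>   // include before GL/glut.h so the newer tokens and entry points are declared
#include <GL/glut.h>

int main(int argc, char **argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_SINGLE|GLUT_RGB);
    glutInitWindowPosition(50,50);
    glutCreateWindow("Texture Test");
    if (glewInit() != GLEW_OK) {   // needs a current context, so call it after glutCreateWindow()
        fprintf(stderr, "glewInit failed\n");
        return 1;
    }
    setupmywindow();               // GL_UNSIGNED_SHORT_5_6_5 now compiles because glew.h defines it
    glutDisplayFunc(myDrawing);
    glutMainLoop();
    return 0;
}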
In windows, add this line after the includes:
#ifndef GL_UNSIGNED_SHORT_5_6_5
#define GL_UNSIGNED_SHORT_5_6_5 0x8363
#endif
#ifndef GL_CLAMP_TO_EDGE
#define GL_CLAMP_TO_EDGE 0x812F
#endif
According to this video.
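As for the general question of which type to pass: the loaded SDL_Surface describes its own pixel layout, so you can inspect it instead of guessing. A hedged sketch (field names are from SDL 1.2's SDL_PixelFormat; GL_BGR/GL_BGRA may also need a #define or GLEW on Windows):
SDL_Surface* img = SDL_LoadBMP(filename);
GLenum format = GL_RGB;
GLenum type = GL_UNSIGNED_BYTE;
if (img->format->BitsPerPixel == 16 && img->format->Rmask == 0xF800) {
    type = GL_UNSIGNED_SHORT_5_6_5;                                   // 16-bit 5-6-5, e.g. the GIMP export above
} else if (img->format->BitsPerPixel == 24) {
    format = (img->format->Rmask == 0x000000FF) ? GL_RGB : GL_BGR;    // 24-bit BMPs are usually stored BGR
} else if (img->format->BitsPerPixel == 32) {
    format = (img->format->Rmask == 0x000000FF) ? GL_RGBA : GL_BGRA;
}
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, img->w, img->h, 0, format, type, img->pixels);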
I am writing an SDL / OpenGL application that runs under OS X. I have to use existing code which uses the DevIL library for loading JPG and PNG textures. Unfortunately, this works very badly under OS X, so I decided not to use DevIL at all and to rewrite the respective parts of the application using another library. I want to keep it flexible (DevIL can handle a lot of image formats) and easy to use. Is there a good replacement for DevIL that you can recommend? The application is entirely written in C++.
Have a look at the SDL_image library. It offers functions like IMG_Load that load your picture as an SDL_Surface.
Since you already work with SDL this should fit quite well in your program.
Sample taken from the SDL_image documentation:
// Load sample.png into image
SDL_Surface* image = IMG_Load("sample.png");
if (image == nullptr) {
std::cout << "IMG_Load: " << IMG_GetError() << "\n";
}
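Since the goal is OpenGL textures, here is a rough sketch of handing that surface to glTexImage2D (assuming the PNG decodes to a tightly packed 32-bit RGBA surface; real code should check image->format first):
GLuint tex = 0;
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);   // SDL rows are not guaranteed to be 4-byte aligned
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, image->w, image->h, 0,
             GL_RGBA, GL_UNSIGNED_BYTE, image->pixels);
SDL_FreeSurface(image);                  // the pixels now live in GL, so the surface can go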
SDL 2 SDL_image minimal runnable example
main.c
#include <stdlib.h>
#include <SDL2/SDL.h>
#include <SDL2/SDL_image.h>
int main(void) {
    SDL_Event event;
    SDL_Renderer *renderer = NULL;
    SDL_Texture *texture = NULL;
    SDL_Window *window = NULL;
    SDL_Init(SDL_INIT_TIMER | SDL_INIT_VIDEO);
    SDL_CreateWindowAndRenderer(
        500, 500,
        0, &window, &renderer
    );
    IMG_Init(IMG_INIT_PNG);
    texture = IMG_LoadTexture(renderer, "flower.png");
    while (1) {
        SDL_RenderCopy(renderer, texture, NULL, NULL);
        SDL_RenderPresent(renderer);
        if (SDL_PollEvent(&event) && event.type == SDL_QUIT)
            break;
    }
    SDL_DestroyTexture(texture);
    IMG_Quit();
    SDL_DestroyRenderer(renderer);
    SDL_DestroyWindow(window);
    SDL_Quit();
    return EXIT_SUCCESS;
}
GitHub upstream.
Compile and run:
sudo apt-get install libsdl2-dev libsdl2-image-dev
gcc -std=c99 -o main -Wall -Wextra -pedantic main.c -lSDL2 -lSDL2_image
./main
Outcome:
Tested on Ubuntu 16.04, GCC 6.4.0, SDL 2.0.4, SDL Image 2.0.1.
Take a look at FreeImage. It supports all major formats and is easily built with MacPorts. It's nice to work with as well, and auto-detects the image format.
FREE_IMAGE_FORMAT format = FreeImage_GetFileType(filename.c_str(), 0);
FIBITMAP *bitmap = FreeImage_Load(format, filename.c_str());
if (!bitmap)
{
    LOG_ERROR("Unable to load texture: " + filename);
    return false;
}
mlWidth = FreeImage_GetWidth(bitmap);
mlHeight = FreeImage_GetHeight(bitmap);
glGenTextures(1, &mpTextures[0]);
glBindTexture(GL_TEXTURE_2D, mpTextures[0]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, mlWidth, mlHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE,
             (GLvoid*)FreeImage_GetBits(bitmap));
FreeImage_Unload(bitmap);
If you're on Mac OS anyway, why not just use CGImageSource to do the loading? OS X natively supports loading many file formats including PNG and JPEG.
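If you go that route, a hedged sketch of the CoreGraphics/ImageIO side might look like this (the helper name and error handling are made up for illustration; it decodes any natively supported format into an RGBA buffer you can pass straight to glTexImage2D):
#include <ApplicationServices/ApplicationServices.h>
#include <vector>

bool loadPixelsCG(const char* path, std::vector<unsigned char>& pixels, size_t& w, size_t& h) {
    CFStringRef cfPath = CFStringCreateWithCString(NULL, path, kCFStringEncodingUTF8);
    CFURLRef url = CFURLCreateWithFileSystemPath(NULL, cfPath, kCFURLPOSIXPathStyle, false);
    CGImageSourceRef src = CGImageSourceCreateWithURL(url, NULL);
    CFRelease(cfPath);
    CFRelease(url);
    if (!src) return false;
    CGImageRef img = CGImageSourceCreateImageAtIndex(src, 0, NULL);
    CFRelease(src);
    if (!img) return false;
    w = CGImageGetWidth(img);
    h = CGImageGetHeight(img);
    pixels.resize(w * h * 4);
    // Draw the decoded image into a bitmap context backed by our RGBA buffer
    CGColorSpaceRef cs = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(pixels.data(), w, h, 8, w * 4, cs,
                                             kCGImageAlphaPremultipliedLast);
    CGContextDrawImage(ctx, CGRectMake(0, 0, w, h), img);
    CGContextRelease(ctx);
    CGColorSpaceRelease(cs);
    CGImageRelease(img);
    return true;
}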