My program displays video frames as OpenGL textures.
I have problems with OpenGL initialization. To see the video I have to start the rendering thread, stop it, and start it again. I think I am missing something in the CRenderThread::InitOpenGL() function. What should I do for correct OpenGL initialization?
My environment:
Windows 7 x64
Microsoft Visual Studio 2008 x64
Here is the code:
#include "RenderThread.h"
#include <QtDebug>
#include <vm_time.h>
static Ipp32u UMCToInternalFormat(UMC::ColorFormat format)
{
switch(format)
{
case UMC::BGR24: return GL_BGR;
case UMC::BGR32: return GL_BGRA;
case UMC::RGB24: return GL_RGB;
case UMC::RGB32: return GL_RGBA;
}
return 0;
}
CRenderThread::CRenderThread(const WId& rnWindowHandle)
: m_bInitialized(false)
, m_WindowHandle(rnWindowHandle)
, m_Texture(0)
, m_fTextureWidth(0.0f)
, m_fTextureHeight(0.0f)
, m_nFrameWidth(0)
, m_nFrameHeight(0)
, m_nWindowWidth(0)
, m_nWindowHeight(0)
{
}
void CRenderThread::PrepareWork()
{
// Wait until first frame comes
if(!m_bAbort)
Suspend();
}
void CRenderThread::DoOnStop()
{
if(m_WindowGLResourceContext)
{
wglDeleteContext(m_WindowGLResourceContext);
m_WindowGLResourceContext = 0;
}
ReleaseDC(m_WindowHandle, m_WindowDC);
if(m_Texture)
{
glDeleteTextures(1, &m_Texture);
m_Texture = 0;
}
}
void CRenderThread::InitOpenGL()
{
PIXELFORMATDESCRIPTOR pfd = {
sizeof(PIXELFORMATDESCRIPTOR), 1, PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
PFD_TYPE_RGBA, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 0, 0, PFD_MAIN_PLANE, 0, 0, 0, 0
};
m_WindowDC = GetDC(m_WindowHandle);
if(!m_WindowDC)
return;
if(!SetPixelFormat(m_WindowDC, ChoosePixelFormat(m_WindowDC, &pfd), &pfd))
return;
m_WindowGLResourceContext = wglCreateContext(m_WindowDC); // create rendering context
if(!m_WindowGLResourceContext)
return;
if(!wglMakeCurrent(m_WindowDC, m_WindowGLResourceContext)) // set it as current
return;
// OpenGL context already tied to output window
// to disable all slow GL components
// it is not mandatory to disable all if we have accelerated card
glClearColor(0.0f, 170.0f, 255.0f, 1.0f);
glClearDepth(1.0);
glDepthFunc(GL_NEVER);
// disable slow GL extensions
glDisable(GL_DEPTH_TEST); glDisable(GL_ALPHA_TEST); glDisable(GL_BLEND);
glDisable(GL_DITHER); glDisable(GL_FOG); glDisable(GL_STENCIL_TEST);
glDisable(GL_LIGHTING); glDisable(GL_LOGIC_OP); glDisable(GL_TEXTURE_1D);
glDisable(GL_TEXTURE_2D);
glPixelTransferi(GL_MAP_COLOR, GL_FALSE);
glPixelTransferi(GL_RED_SCALE, 1); glPixelTransferi(GL_RED_BIAS, 0);
glPixelTransferi(GL_GREEN_SCALE, 1); glPixelTransferi(GL_GREEN_BIAS, 0);
glPixelTransferi(GL_BLUE_SCALE, 1); glPixelTransferi(GL_BLUE_BIAS, 0);
glPixelTransferi(GL_ALPHA_SCALE, 1); glPixelTransferi(GL_ALPHA_BIAS, 0);
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &m_Texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glViewport(0, 0, m_nWindowWidth, m_nWindowHeight);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glRasterPos2i(-1, 1); // move to the upper left corner
glPixelZoom(1.0, -1.0); // top to bottom
SwapBuffers(m_WindowDC);
m_bInitialized = true;
}
void CRenderThread::SetRenderFrame(PVideoData pFrame)
{
Q_ASSERT(pFrame.get());
{
//boost::mutex::scoped_lock Lock(m_FrameMutex);
m_pFrameToRender = pFrame;
}
// Resume thread to render current frame
Resume();
}
void CRenderThread::DoWork()
{
IppiSize CurWinSize;
UMC::Status nStatus = UMC::UMC_OK;
::RECT rect;
GetClientRect(m_WindowHandle, &rect);
CurWinSize.height = rect.bottom;
CurWinSize.width = rect.right;
if(!m_bInitialized)
InitOpenGL();
if(CurWinSize.width > IPP_MAX_16S || CurWinSize.height > IPP_MAX_16S) // window seems to be destroyed
return;
// reinit buffers if window size has been changed
if(CurWinSize.height != m_nWindowHeight || CurWinSize.width != m_nWindowWidth)
{
m_nWindowWidth = CurWinSize.width;
m_nWindowHeight = CurWinSize.height;
glViewport(0, 0, m_nWindowWidth, m_nWindowHeight);
}
// Render frame
{
//boost::mutex::scoped_lock Lock(m_FrameMutex);
if(m_pFrameToRender.get())
{
if(m_nWindowWidth && m_nWindowHeight)
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
m_nFrameWidth = m_pFrameToRender->GetWidth();
m_nFrameHeight = m_pFrameToRender->GetHeight();
m_nRenderFormat = UMCToInternalFormat(m_pFrameToRender->GetColorFormat());
glTexImage2D(GL_TEXTURE_2D, 0, 3, m_nFrameWidth, m_nFrameHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pFrameToRender->GetBufferPointer());
//glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, m_nFrameWidth, m_nFrameHeight, m_nRenderFormat, GL_UNSIGNED_BYTE, m_pFrameToRender->GetBufferPointer());
glBegin(GL_POLYGON);
glTexCoord2i(0, 0); glVertex2f(-1.0, 1.0);
glTexCoord2i(1, 0); glVertex2f( 1.0, 1.0);
glTexCoord2i(1, 1); glVertex2f( 1.0, -1.0);
glTexCoord2i(0, 1); glVertex2f(-1.0, -1.0);
glEnd();
glFlush();
SwapBuffers(m_WindowDC); // to draw on physical screen
}
}
}
// Wait for next frame to render
if(!m_bAbort)
Suspend();
}
A couple of thoughts, not all necessarily related to your problem:
glClearColor(0.0f, 170.0f, 255.0f, 1.0f);
Clear color is clamped to the range of [0,1], not [0,255].
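For the color you seem to want, the components would have to be normalized to that range, e.g. (a sketch, assuming the intent was that light blue):
glClearColor(0.0f, 170.0f / 255.0f, 1.0f, 1.0f);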
// disable slow GL extensions
glDisable(GL_DEPTH_TEST); glDisable(GL_ALPHA_TEST); glDisable(GL_BLEND);
glDisable(GL_DITHER); glDisable(GL_FOG); glDisable(GL_STENCIL_TEST);
glDisable(GL_LIGHTING); glDisable(GL_LOGIC_OP); glDisable(GL_TEXTURE_1D);
glDisable(GL_TEXTURE_2D);
These (and most other OpenGL settings) are disabled by default, so all of these calls do nothing. They aren't hurting anything, though.
glPixelTransferi(GL_MAP_COLOR, GL_FALSE);
glPixelTransferi(GL_RED_SCALE, 1); glPixelTransferi(GL_RED_BIAS, 0);
glPixelTransferi(GL_GREEN_SCALE, 1); glPixelTransferi(GL_GREEN_BIAS, 0);
glPixelTransferi(GL_BLUE_SCALE, 1); glPixelTransferi(GL_BLUE_BIAS, 0);
glPixelTransferi(GL_ALPHA_SCALE, 1); glPixelTransferi(GL_ALPHA_BIAS, 0);
Again, these are all the defaults.
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &m_Texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
This might actually be a real problem. glTexParameter only affects the currently bound texture, but you're calling it here with no texture bound, so these calls do nothing. When you actually do use a texture later, its min filter will still be the default mipmapping one, which can cause it not to be displayed. Move your glTexParameter calls to after you have bound the texture you want them to affect.
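A minimal sketch of the corrected ordering, based on the initialization code above (bind m_Texture first, then set its parameters):
glGenTextures(1, &m_Texture);
glBindTexture(GL_TEXTURE_2D, m_Texture);                             // bind first ...
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);   // ... so these affect m_Texture
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);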
Related
I'm writing something that I want to work on as much hardware as possible, but I've found an issue where I don't believe the fault is with my code. Can anyone confirm?
UPDATE - this also happens on Windows 7, and when booting in safe mode to force no drivers; in both cases:
OpenGL Version: - 1.1.0,
Vendor: - Microsoft Corporation,
Renderer: - GDI Generic
When running on Windows 10 with an older graphics card, Microsoft's OpenGL 1.1 driver is used. Yes, I know that shaders are the way to go and that older cards aren't actually supported on Windows 10, but since when did that stop anyone... Like I said, I want maximum compatibility. Code that demonstrates the issue is included:
//link library
//opengl32
//linker options
//-lmingw32 -lSDL2main -lSDL2
//include SDL2 dirs-
//compiler-
//SDL2-2.0.4\i686-w64-mingw32\include\SDL2
//linker-
//SDL2-2.0.4\i686-w64-mingw32\lib
#include <SDL.h>
#include <SDL_opengl.h>
void draw();
bool fix = false;
bool wire = false;
GLuint tex1 = 0;
GLuint tex2 = 0;
int main(int argc, char *argv[])
{
SDL_Init(SDL_INIT_VIDEO|SDL_INIT_TIMER);
SDL_Window* mywindow = SDL_CreateWindow("press q or w to toggle fixes...",100,100,640,480,SDL_WINDOW_OPENGL|SDL_WINDOW_SHOWN);
SDL_GL_CreateContext(mywindow);
glEnable(GL_TEXTURE_2D);
bool loop = true;
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
while (loop)
{
SDL_Event event;
while (SDL_PollEvent(&event))
{
if (event.type == SDL_QUIT)
loop = false;
if (event.type == SDL_KEYDOWN)
{
switch (event.key.keysym.sym)
{
case SDLK_ESCAPE:
loop = false;
break;
case SDLK_q:
fix = !fix;
break;
case SDLK_w:
if(wire)
{
glPolygonMode(GL_FRONT, GL_FILL);
}
else
{
glPolygonMode(GL_FRONT, GL_LINE);
}
wire = !wire;
break;
default:
break;
}
}
}
draw();
SDL_GL_SwapWindow(mywindow);
}
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDeleteTextures(1, &tex1);
glDeleteTextures(1, &tex2);
return 0;
}
void texturator(GLuint* tex, unsigned char r, unsigned char g, unsigned char b)
{
if(!(*tex))
{
glGenTextures(1, tex) ;
glBindTexture(GL_TEXTURE_2D, *tex);
unsigned char text[4*4*4];//4*w*h
for(unsigned int x = 0;x<4;x++)
{
for(unsigned int y = 0;y<4;y++)
{
unsigned char* pix = &(text[((y*4)+x)*4]);
pix[0] = x*r;
pix[1] = y*g;
pix[2] = x*b;
pix[3] = 255;
}
}
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 4, 4, 0, GL_RGBA, GL_UNSIGNED_BYTE, text);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
}
else
{
if(fix)
{
glBindTexture(GL_TEXTURE_2D, 0);
}
glBindTexture(GL_TEXTURE_2D, *tex);
}
}
void draw()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
GLfloat mat1[] = {1.0, 0, 0, 0, 0, 1.0, 0, 0, 0, 0, 1.0, 0, 0, 0, 0, 1};
glLoadMatrixf(mat1);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
GLfloat verts[] = {0.0f,0.0f,0.0f, 1.0f,0.0f,0.0f, 1.0f,1.0f,0.0f, 0.0f,1.0f,0.0f};
GLfloat uvs[] = {0.0f,0.0f, 1.0f,0.0f, 1.0f,1.0f, 0.0f,1.0f};
glVertexPointer(3, GL_FLOAT, 0, verts);
glTexCoordPointer(2, GL_FLOAT, 0, uvs);
texturator(&tex1,0,60,60);
GLuint elem[] = {0,1,2, 2,3,0};
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, elem);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
GLfloat mat2[] = {1, 0, 0, 0, -0, 1, 0, 0, 0, 0, 1, 0, -1, 0, 0, 1};
glLoadMatrixf(mat2);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
texturator(&tex2,60,10,0);
glTexCoordPointer(2, GL_FLOAT, 0, uvs);
glVertexPointer(3, GL_FLOAT, 0, verts);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
}
From the second frame on, tex2 will continue to be displayed for both draws, even though gDEBugger says that tex1 is bound. Binding 0 before the new bind fixes this. Switching to wireframe instead of fill mode also, curiously, fixes it, while it is still drawn as wireframe. (Use the q and w keys to toggle these.)
Can anyone shed any light on this? Is it just Microsoft's implementation being...
And is there a better way to include compatibility with older stuff?
Cheers!!
I'm trying to display a Coin3D/Open Inventor scene with Qt in a QGLWidget by using the SoOffscreenRenderer, and I need help converting it to a QImage.
What I have tried so far is to render the scene into the SoOffscreenRenderer and get the buffer like this:
unsigned char * getCoinCubeImgBuffer(){
// [...] create the scene, add lightning and camera
SoOffscreenRenderer offscreenRenderer(vpRegion);
offscreenRenderer.setComponents(
SoOffscreenRenderer::Components::RGB_TRANSPARENCY
);
SbBool ok = offscreenRenderer.render(root);
// to be sure that something is actually rendered,
// save the buffer content to a file:
qDebug() << "SbBool ok?" << ok;
qDebug() << "wasFileWrittenRGB" <<
offscreenRenderer.writeToRGB("C:/test-gl.rgb");
qDebug() << "wasFileWrittenPS" <<
offscreenRenderer.writeToPostScript("C:/test-gl.ps");
unsigned char * imgbuffer = offscreenRenderer.getBuffer();
return imgbuffer;
}
and then create a QImage from the buffer data:
QImage convertImgBuffer(){
unsigned char *const imgBuffer = getCoinCubeImgBuffer();
QImage img(imgBuffer, windowWidth, windowHeight, QImage::Format_ARGB32);
// Important!
img = img.rgbSwapped();
QImage imgGL = convertToGLFormat(img);
return imgGL;
}
Would this be the correct way to do it?
As described in this question about drawing a QImage, I'm able to draw it if the source is a picture.
Edit: To make sure that my buffer actually contains a scene, I write the buffer content to two files. You can view .rgb and .ps files, for example, with IrfanView plus its plugins.
Edit 2: Just figured out that I have to use img.rgbSwapped(). Now it's showing the scene in black & white and without lighting. I will investigate further.
Edit 3: With the code like this, you need to adapt the OpenGL call in this way to render in color:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, tex.width(),
tex.height(), 0, GL_RGBA, GL_UNSIGNED_BYTE, tex.bits());
The first format argument is GL_RGB, the second is GL_RGBA. The cube is still completely black, though.
Edit 4: It was an error in my scene: you have to add a light before you add the rest, and especially before you add the camera.
So now I can either use `QGLWidget`'s functions like `bindTexture()` or use direct OpenGL calls, but I'm not sure how exactly. It would be great if someone could push me in the right direction.
Can't I just use OpenGL like this?
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &offscreenBufferTexture);
glBindTexture(GL_TEXTURE_2D, offscreenBufferTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, imgGL.width(),
imgGL.height(), 0, GL_RGBA, GL_UNSIGNED_BYTE,
imgGL.bits());
Or maybe `glDrawPixels()`?
glDrawPixels(imgGL.width(), imgGL.height(), GL_RGBA,
GL_UNSIGNED_BYTE, imgGL.bits());
I figured out how to draw a QImage with OpenGL, see this thread. So there seems to be a problem with the buffer or the conversion of it.
Here's the resulting code which renders the scene correctly.
First, create a valid scene:
void loadCoinScene(){
// Init Coin
SoDB::init();
// The root node
root = new SoSeparator;
root->ref();
// Add the light _before_ you add the camera
SoDirectionalLight * light = new SoDirectionalLight;
root->addChild(light);
vpRegion.setViewportPixels(0, 0, coinSceneWidth, coinSceneHeight);
SoPerspectiveCamera *perscam = new SoPerspectiveCamera();
root->addChild(perscam);
SbRotation cameraRotation = SbRotation::identity();
cameraRotation *= SbRotation(SbVec3f(0, 1, 0), 0.4f);
perscam->orientation = cameraRotation;
SoCube * cube = new SoCube;
root->addChild(cube);
// make sure that the cube is visible
perscam->viewAll(root, vpRegion);
}
Then render the scene into the Offscreen Buffer and convert it to a QImage:
QImage getCoinCubeImgBuffer(){
SoOffscreenRenderer offscreenRenderer(vpRegion);
offscreenRenderer.setComponents(
SoOffscreenRenderer::Components::RGB_TRANSPARENCY
);
offscreenRenderer.render(root);
QImage img(offscreenRenderer.getBuffer(), coinSceneWidth,
coinSceneHeight, QImage::Format_ARGB32);
// Important!
return img.rgbSwapped();
}
If you now want to render the QImage with OpenGL, use my solution from my Render QImage with OpenGL question and change the loadTexture2() method to this:
QImage loadTexture2(GLuint &textureID){
glEnable(GL_TEXTURE_2D); // Enable texturing
glGenTextures(1, &textureID); // Obtain an id for the texture
glBindTexture(GL_TEXTURE_2D, textureID); // Set as the current texture
QImage im = getCoinCubeImgBuffer();
// Convert to OpenGLs unnamed format
// The resulting GL format is GL_RGBA
QImage tex = QGLWidget::convertToGLFormat(im);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, tex.width(), tex.height(), 0,
GL_RGBA, GL_UNSIGNED_BYTE, tex.bits());
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
glDisable(GL_TEXTURE_2D);
return tex;
}
I don't think there is a need to feed the texture through QImage. The following is a working example:
#include <QApplication>
#include <QGLWidget>
#include <Inventor/SoInput.h>
#include <Inventor/SoOffscreenRenderer.h>
#include <Inventor/nodes/SoSeparator.h>
static GLuint textureID(0);
class GLWidget : public QGLWidget
{
public:
explicit GLWidget() : QGLWidget() {}
~GLWidget() {}
protected:
void initializeGL() {
glShadeModel(GL_FLAT);
glEnable(GL_LIGHTING);
glEnable(GL_LIGHT0);
static GLfloat lightAmbient[4] = { 1.0, 1.0, 1.0, 1.0 };
glLightfv(GL_LIGHT0, GL_AMBIENT, lightAmbient);
}
void paintGL() {
if (!textureID)
return;
glClearColor(0.4f, 0.1f, 0.1f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, textureID);
glBegin(GL_QUADS);
glTexCoord2f(0,0); glVertex3f(-1, -1, -1);
glTexCoord2f(1,0); glVertex3f( 1, -1, -1);
glTexCoord2f(1,1); glVertex3f( 1, 1, -1);
glTexCoord2f(0,1); glVertex3f(-1, 1, -1);
glEnd();
glDisable(GL_TEXTURE_2D);
}
void resizeGL(int width, int height) {
int side = qMin(width, height);
glViewport((width - side) / 2, (height - side) / 2, side, side);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(-1.0, 1.0, -1.0, 1.0, 0.0, 1000.0);
glMatrixMode(GL_MODELVIEW);
}
};
int main(int argc, char *argv[])
{
QApplication app(argc, argv);
GLWidget glWidget;
glWidget.show();
static const char * inlineSceneGraph[] = {
"#Inventor V2.1 ascii\n",
"\n",
"Separator {\n",
" PerspectiveCamera { position 0 0 5 }\n",
" DirectionalLight {}\n",
" Rotation { rotation 1 0 0 0.3 }\n",
" Cone { }\n",
" BaseColor { rgb 1 0 0 }\n",
" Scale { scaleFactor .7 .7 .7 }\n",
" Cube { }\n",
"\n",
" DrawStyle { style LINES }\n",
" ShapeHints { vertexOrdering COUNTERCLOCKWISE }\n",
" Coordinate3 {\n",
" point [\n",
" -2 -2 1.1, -2 -1 1.1, -2 1 1.1, -2 2 1.1,\n",
" -1 -2 1.1, -1 -1 1.1, -1 1 1.1, -1 2 1.1\n",
" 1 -2 1.1, 1 -1 1.1, 1 1 1.1, 1 2 1.1\n",
" 2 -2 1.1, 2 -1 1.1, 2 1 1.1, 2 2 1.1\n",
" ]\n",
" }\n",
"\n",
" Complexity { value 0.7 }\n",
" NurbsSurface {\n",
" numUControlPoints 4\n",
" numVControlPoints 4\n",
" uKnotVector [ 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0 ]\n",
" vKnotVector [ 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0 ]\n",
" }\n",
"}\n",
NULL
};
SoInput in;
in.setStringArray(inlineSceneGraph);
glWidget.resize(600, 600);
SoOffscreenRenderer renderer(SbViewportRegion(glWidget.width(),
glWidget.height()));
renderer.setComponents(SoOffscreenRenderer::RGB_TRANSPARENCY);
renderer.setBackgroundColor(SbColor(.0f, .0f, .8f));
SoSeparator *rootScene = SoDB::readAll(&in);
rootScene->ref();
renderer.render(rootScene);
rootScene->unref();
glEnable(GL_TEXTURE_2D); // Enable texturing
glGenTextures(1, &textureID); // Obtain an id for the texture
glBindTexture(GL_TEXTURE_2D, textureID); // Set as the current texture
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA,
renderer.getViewportRegion().getViewportSizePixels()[0],
renderer.getViewportRegion().getViewportSizePixels()[1],
0, GL_BGRA, GL_UNSIGNED_BYTE, renderer.getBuffer());
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
glDisable(GL_TEXTURE_2D);
return app.exec();
}
A simple example of rendering a local scene with SoWin, Coin3D ( download SoWin from here )
SoSeparator * CreateScene(SoSeparator* root)
{
root->ref();
root->setName("root_node");
SoPerspectiveCamera * camera = new SoPerspectiveCamera;
camera->setName("simple_camera");
SbRotation cameraRotation = SbRotation::identity();
cameraRotation *= SbRotation(SbVec3f(1, 0, 0), -0.4f);
cameraRotation *= SbRotation(SbVec3f(0, 1, 0), 0.4f);
camera->orientation = cameraRotation;
SoCone* cone1= new SoCone();
cone1->setName("Cone1");
SoBaseColor * color = new SoBaseColor;
color->setName("myColor");
root->addChild(camera);
root->addChild(cone1);
root->addChild(color);
return root;
}
Render scene created above
void RenderLocal()
{
HWND window = SoWin::init("IvExample");
if (window==NULL) exit(1);
SoWinExaminerViewer * viewer = new SoWinExaminerViewer(window);
SoSeparator * root = new SoSeparator;
auto* data = CreateScene(root);
data->getNumChildren();
viewer->setSceneGraph(root);
viewer->show();
SoWin::show(window);
SoWin::mainLoop();
delete viewer;
root->unref();
}
Render scene from iv file
int RenderFile()
{
HWND window = SoWin::init("Iv");
if (window==NULL) exit(1);
SoWinExaminerViewer * viewer = new SoWinExaminerViewer(window);
SoInput sceneInput;
if ( !sceneInput.openFile( "example.iv" ) )
return -1;
SoSeparator *root =SoDB::readAll( &sceneInput );
root->ref();
viewer->setSceneGraph(root);
viewer->show();
SoWin::show(window);
SoWin::mainLoop();
delete viewer;
root->unref();
return 0;
}
Does anyone have a working example of PBOs (Pixel Buffer Objects) + SDL 2.0 (Simple DirectMedia Layer) + OpenGL?
The reason is to get asynchronous GPU -> CPU downloading with glReadPixels and thus get a performance boost.
Here's my attempt. There's no measured difference at all between use_pbo = false and true. I've used PBOs with GLUT before on the same machine, so I know my hardware supports them.
I've looked a lot at the http://www.songho.ca/opengl/gl_pbo.html tutorial but don't see that I'm doing anything wrong.
bool use_pbo = true; //remember to wait a bit for fps to pick up speed
if(SDL_Init(SDL_INIT_EVENTS) != 0) throw "SDL_Init";
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
sdl_window = SDL_CreateWindow("", 10, 20, window_width, window_height, SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE);
sdl_gl_context = SDL_GL_CreateContext(sdl_window);
SDL_GL_MakeCurrent(sdl_window, sdl_gl_context);
vertical_sync(false);
{
glGenBuffers(2, pbos);
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbos[0]);
glBufferData(GL_PIXEL_PACK_BUFFER, pbo_width*pbo_height*4, 0, GL_STREAM_READ);
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbos[1]);
glBufferData(GL_PIXEL_PACK_BUFFER, pbo_width*pbo_height*4, 0, GL_STREAM_READ);
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
}
//render to default frame buffer
{
glDrawBuffer(GL_BACK);
glClearColor(0.1, 0.2, 0.3, 0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
glUseProgram(simple_shader);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, transparent_texture);
glBegin(GL_TRIANGLE_STRIP);
//textcoord, color, position
glVertexAttrib2f(2, 0, 1); glVertexAttrib4f(1, 1, 0, 0, 1); glVertexAttrib2f(0, -1, -1); //bottom left
glVertexAttrib2f(2, 1, 1); glVertexAttrib4f(1, 0, 1, 0, 1); glVertexAttrib2f(0, +1, -1); //bottom right
glVertexAttrib2f(2, 0, 0); glVertexAttrib4f(1, 0, 0, 1, 1); glVertexAttrib2f(0, -1, +1); //top left
glVertexAttrib2f(2, 1, 0); glVertexAttrib4f(1, 1, 1, 0, 1); glVertexAttrib2f(0, +1, +1); //top right
glEnd();
glBindTexture(GL_TEXTURE_2D, 0);
glUseProgram(0);
glDisable(GL_BLEND);
while(true)
{
if(use_pbo)
{
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbos[1]);
GLvoid* map_buffer_data = glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY);
if(map_buffer_data)
{
//glRasterPos2i(-1, -1); //default, left bottom
glRasterPos2i(0, -1);
glDrawPixels(pbo_width, pbo_height, GL_BGRA, GL_UNSIGNED_BYTE, map_buffer_data);
glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
}
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
}
else
{
glRasterPos2i(0, -1); //right bottom
glDrawPixels(pbo_width, pbo_height, GL_BGRA, GL_UNSIGNED_BYTE, t.c);
}
}
}
SDL_GL_SwapWindow(sdl_window);
//cycle buffers
{
GLuint t = pbos[1];
pbos[1] = pbos[0];
pbos[0] = t;
}
Try binding the PBO to your texture:
GL_PIXEL_UNPACK_BUFFER is for pixel data being passed into OpenGL; GL_PIXEL_PACK_BUFFER is for pixel data being retrieved from OpenGL.
You should also define the texture image with glTexImage2D:
If a non-zero named buffer object is bound to the GL_PIXEL_UNPACK_BUFFER target
(see glBindBuffer) while a texture image is
specified, data is treated as a byte offset into the buffer object's data store.
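A minimal sketch of what this suggests, assuming the aim is to stream pixel data into a texture through a pixel unpack buffer (pbo_width/pbo_height are taken from the question's code; the buffer and texture ids are placeholders):
GLuint pbo = 0, tex = 0;
glGenBuffers(1, &pbo);
glGenTextures(1, &tex);
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, pbo);
glBufferData(GL_PIXEL_UNPACK_BUFFER, pbo_width * pbo_height * 4, NULL, GL_STREAM_DRAW);
glBindTexture(GL_TEXTURE_2D, tex);
// with a non-zero unpack buffer bound, the data argument is a byte offset into the PBO
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, pbo_width, pbo_height, 0, GL_BGRA, GL_UNSIGNED_BYTE, (void*)0);
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);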
I think texture mapping is a really easy task. I have implemented it many times before, but this time it fails and I don't know why. I can guarantee that the path used to load the texture is right. What else could be causing the problem?
Here is my code:
GLuint mytexture;
// the two functions below come from NeHe's tutorial; I think they work well
AUX_RGBImageRec *LoadBMP(CHAR *Filename)
{
FILE *File=NULL;
if (!Filename)
{
return NULL;
}
File=fopen(Filename,"r");
if (File)
{
fclose(File);
return auxDIBImageLoadA(Filename);
}
return NULL;
}
int LoadGLTextures()
{
int Status=FALSE;
AUX_RGBImageRec *TextureImage[1];
memset(TextureImage,0,sizeof(void *)*1);
if (TextureImage[0]=LoadBMP("NeHe.bmp"))
{
Status=TRUE;
glGenTextures(1, &mytexture);
glBindTexture(GL_TEXTURE_2D, mytexture);
glTexImage2D(GL_TEXTURE_2D, 0, 3, TextureImage[0]->sizeX, TextureImage[0]->sizeY, 0,
GL_RGB, GL_UNSIGNED_BYTE, TextureImage[0]->data);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
}
if (TextureImage[0])
{
if (TextureImage[0]->data)
{
free(TextureImage[0]->data);
}
free(TextureImage[0]);
}
return Status;
}
//next is my Init() code:
bool DemoInit( void )
{
if (!LoadGLTextures())
{
return FALSE;
}
glEnable(GL_TEXTURE_2D);
........//other init is ok
}
bool DemoRender()
{
...///render other things
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, mytexture);
glColor3f(0,0,1);
glBegin(GL_QUADS);
glTexCoord2f(0, 0); glVertex2f(0, 0);
glTexCoord2f(1, 0); glVertex2f(200, 0);
glTexCoord2f(1, 1); glVertex2f(200, 200);
glTexCoord2f(0, 1); glVertex2f(0, 200);
glEnd();
glDisable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, 0);
return true;
}
Pretty clear, huh? However, the final result is only a blue rectangle without the texture. Could anybody give me a hint?
Assuming TextureImage[0]->data is correctly populated:
However, the final result only has a blue rectangle without the texture.
You're using the default GL_MODULATE texture environment. Either switch glColor3f(0,0,1) to glColor3f(1,1,1) or use GL_DECAL.
You might also try a glPixelStorei(GL_UNPACK_ALIGNMENT, 1) before your glTexImage2D() since you're using GL_RGB for format.
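A sketch of both suggestions applied to the code above (assuming the fixed-function pipeline as in the question):
// in LoadGLTextures(), before the glTexImage2D() call:
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);   // GL_RGB rows are not necessarily 4-byte aligned
// in DemoRender(), before drawing the textured quad:
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);   // or keep GL_MODULATE ...
glColor3f(1.0f, 1.0f, 1.0f);                                // ... and draw with white instead of blue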
The problem was that I had set GL_LINE polygon mode before loading the texture and failed to notice it. After setting GL_FILL mode, everything is fine!
I've been attempting to render text onto an OpenGL window using SDL and the SDL_TTF library on Windows XP, VS2010.
Versions:
SDL version 1.2.14
SDL TTF devel 1.2.10
OpenGL (the version is at least 2-3 years old).
I have successfully created an OpenGL window using SDL / SDL_image and can render lines / polygons onto it with no problems.
However, moving on to text, it appears that there is some flaw in my current program; I am getting the following result when trying this code here.
For those not willing to visit the pastebin, here are only the crucial code segments:
void drawText(char * text) {
glLoadIdentity();
SDL_Color clrFg = {0,0,255,0}; // set colour to blue (or 'red' for BGRA)
SDL_Surface *sText = TTF_RenderUTF8_Blended( fntCourier, text, clrFg );
GLuint * texture = create_texture(sText);
glBindTexture(GL_TEXTURE_2D, *texture);
// draw a polygon and map the texture to it, may be the source of error
glBegin(GL_QUADS); {
glTexCoord2i(0, 0); glVertex3f(0, 0, 0);
glTexCoord2i(1, 0); glVertex3f(0 + sText->w, 0, 0);
glTexCoord2i(1, 1); glVertex3f(0 + sText->w, 0 + sText->h, 0);
glTexCoord2i(0, 1); glVertex3f(0, 0 + sText->h, 0);
} glEnd();
// free the surface and texture, removing this code has no effect
SDL_FreeSurface( sText );
glDeleteTextures( 1, texture );
}
segment 2:
// create GLTexture out of SDL_Surface
GLuint * create_texture(SDL_Surface *surface) {
GLuint texture = 0;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
// The SDL_Surface appears to have BGR_A formatting, however this ends up with a
// white rectangle no matter which colour i set in the previous code.
int Mode = GL_RGB;
if(surface->format->BytesPerPixel == 4) {
Mode = GL_RGBA;
}
glTexImage2D(GL_TEXTURE_2D, 0, Mode, surface->w, surface->h, 0, Mode,
GL_UNSIGNED_BYTE, surface->pixels);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
return &texture;
}
Is there an obvious bit of code I am missing?
Thank you for any help on this subject.
I've been trying to learn OpenGL and SDL for 3 days now, so please forgive any misinformation on my part.
EDIT:
I notice that using
TTF_RenderUTF8_Shaded
TTF_RenderUTF8_Solid
throws a null pointer exception, which suggests there is an error within the actual text rendering function. I don't know how this relates to TTF_RenderUTF8_Blended returning a red square, but I suspect all the trouble hinges on it.
I think the problem is with the glEnable(GL_TEXTURE_2D) and glDisable(GL_TEXTURE_2D) calls, which must be made every time the text is painted on the screen. And maybe the color conversion between the SDL and GL surfaces is not right either.
I have combined create_texture and drawText into a single function that displays the text properly. Here is the code:
void drawText(char * text, TTF_Font* tmpfont) {
SDL_Rect area;
SDL_Color clrFg = {0,0,255,0};
SDL_Surface *sText = SDL_DisplayFormatAlpha(TTF_RenderUTF8_Blended( tmpfont, text, clrFg ));
area.x = 0;area.y = 0;area.w = sText->w;area.h = sText->h;
SDL_Surface* temp = SDL_CreateRGBSurface(SDL_HWSURFACE|SDL_SRCALPHA,sText->w,sText->h,32,0x000000ff,0x0000ff00,0x00ff0000,0xff000000);
SDL_BlitSurface(sText, &area, temp, NULL);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, sText->w, sText->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, temp->pixels);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glEnable(GL_TEXTURE_2D);
glBegin(GL_QUADS); {
glTexCoord2d(0, 0); glVertex3f(0, 0, 0);
glTexCoord2d(1, 0); glVertex3f(0 + sText->w, 0, 0);
glTexCoord2d(1, 1); glVertex3f(0 + sText->w, 0 + sText->h, 0);
glTexCoord2d(0, 1); glVertex3f(0, 0 + sText->h, 0);
} glEnd();
glDisable(GL_TEXTURE_2D);
SDL_FreeSurface( sText );
SDL_FreeSurface( temp );
}
[screenshot]
I'm initializing OpenGL as follows:
int Init(){
glClearColor( 0.1, 0.2, 0.2, 1);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho( 0, 600, 300, 0, -1, 1 );
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
if( glGetError() != GL_NO_ERROR ){
return false;
}
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_COLOR, GL_ONE_MINUS_SRC_ALPHA);
return true;
}
I think you should just add glEnable(GL_BLEND), because the code for the text surface says TTF_RenderUTF8_Blended( fntCourier, text, clrFg ), so you have to enable the blending abilities of OpenGL.
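Something along these lines (a sketch; GL_SRC_ALPHA / GL_ONE_MINUS_SRC_ALPHA is the usual pairing for alpha-blended text, which is my assumption rather than something stated above):
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);   // blend the glyph alpha over the background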
EDIT
Okay, I finally took the time to put your code through a compiler. Most importantly, I compiled with -Werror so that warnings turn into errors:
GLuint * create_texture(SDL_Surface *surface) {
GLuint texture = 0;
/*...*/
return &texture;
}
I didn't see it at first, because that's something like C coder's 101 and quite unexpected: you must not return pointers to local variables! Once the function goes out of scope, the returned pointer points to nonsense. Why return a pointer at all? Just return the integer:
GLuint create_texture(SDL_Surface *surface) {
GLuint texture = 0;
/*...*/
return texture;
}
Because of this you're also not deleting the texture afterwards. You upload it to OpenGL, but then lose the reference to it.
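With the value-returning version, the caller keeps the id and can delete the texture when it is done with it, roughly like this (a sketch against the drawText() above):
GLuint texture = create_texture(sText);
glBindTexture(GL_TEXTURE_2D, texture);
// ... draw the textured quad ...
glDeleteTextures(1, &texture);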
Your code is also missing a glEnable(GL_TEXTURE_2D); that's why you can't see any effect of the texture. Furthermore, your use of textures is suboptimal. The way you did it, you recreate a whole new texture each time you're about to draw that text. If that happens in an animation loop, you'll
1. run out of texture memory rather soon
2. slow things down significantly
(1) can be addressed by not generating a new texture name on each redraw.
(2) can be addressed by uploading new texture data only when the text changes, and by using glTexSubImage2D instead of glTexImage2D (of course, if the dimensions of the texture change, it must be glTexImage2D); see the sketch after this list.
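A rough sketch of that update path (assuming the texture object and its last dimensions are kept around between frames; surface stands for the SDL_Surface holding the rendered text):
static GLuint textTexture = 0;
static int texW = 0, texH = 0;
if (!textTexture)
    glGenTextures(1, &textTexture);   // create the texture name only once
glBindTexture(GL_TEXTURE_2D, textTexture);
if (surface->w != texW || surface->h != texH) {
    // dimensions changed: (re)allocate storage
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, surface->w, surface->h, 0,
                 GL_RGBA, GL_UNSIGNED_BYTE, surface->pixels);
    texW = surface->w; texH = surface->h;
} else {
    // same dimensions: only replace the pixel data
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, texW, texH,
                    GL_RGBA, GL_UNSIGNED_BYTE, surface->pixels);
}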
EDIT, found another possible issue, but first fix your pointer issue.
You should make sure that you're using the GL_REPLACE or GL_MODULATE texture environment mode. If you use GL_DECAL or GL_BLEND, you end up with red text on a red quad.
The function in my previous post was leaking memory and the program crashed after some time...
I improved this by separating the texture loading and displaying:
The first function must be called before the SDL loop. It loads a text string into memory:
Every string loaded must have a different txtNum parameter.
GLuint texture[100];
SDL_Rect area[100];
void Load_string(char * text, SDL_Color clr, int txtNum, const char* file, int ptsize){
TTF_Font* tmpfont;
tmpfont = TTF_OpenFont(file, ptsize);
SDL_Surface *sText = SDL_DisplayFormatAlpha(TTF_RenderUTF8_Solid( tmpfont, text, clr ));
area[txtNum].x = 0;area[txtNum].y = 0;area[txtNum].w = sText->w;area[txtNum].h = sText->h;
glGenTextures(1, &texture[txtNum]);
glBindTexture(GL_TEXTURE_2D, texture[txtNum]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, sText->w, sText->h, 0, GL_BGRA, GL_UNSIGNED_BYTE, sText->pixels);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
SDL_FreeSurface( sText );
TTF_CloseFont(tmpfont);
}
The second one displays the string and must be called inside the SDL loop:
void drawText(float coords[3], int txtNum) {
glBindTexture(GL_TEXTURE_2D, texture[txtNum]);
glEnable(GL_TEXTURE_2D);
glBegin(GL_QUADS); {
glTexCoord2f(0, 0); glVertex3f(coords[0], coords[1], coords[2]);
glTexCoord2f(1, 0); glVertex3f(coords[0] + area[txtNum].w, coords[1], coords[2]);
glTexCoord2f(1, 1); glVertex3f(coords[0] + area[txtNum].w, coords[1] + area[txtNum].h, coords[2]);
glTexCoord2f(0, 1); glVertex3f(coords[0], coords[1] + area[txtNum].h, coords[2]);
} glEnd();
glDisable(GL_TEXTURE_2D);
}
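Usage would then look roughly like this (a sketch; the font path, color and coordinates are placeholders):
// before the SDL loop
SDL_Color white = {255, 255, 255, 0};
Load_string("Hello world", white, 0, "FreeSans.ttf", 24);
// inside the SDL loop, after clearing the frame
float pos[3] = {10.0f, 10.0f, 0.0f};
drawText(pos, 0);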