glDrawPixels with GLFW only renders a quarter of the screen; buffer uses screen width * height - C++

I don't know how to resolve this problem. I want to use this function to build a software renderer and study the 3D pipeline.
This is the code:
#define GLFW_INCLUDE_GLU
#include <GLFW/glfw3.h>

int main(int argc, const char * argv[]) {
    if (!glfwInit()) { // init error
        return -1;
    }
    int width = 200;  // screen width
    int height = 200; // screen height
    GLFWwindow* window = glfwCreateWindow(width, height, "Hello OpenGL", NULL, NULL); // create window
    if (!window) { // window creation failed
        glfwTerminate();
        return -1;
    }
    unsigned char* pixels = new unsigned char[width * height * 4]; // pixel buffer
    unsigned int index; // temp
    // set every pixel to opaque red
    for (int count_a = 0; count_a < width; count_a++) {
        for (int count_b = 0; count_b < height; count_b++) {
            index = count_b * width + count_a;
            pixels[index * 4 + 0] = 255; // red
            pixels[index * 4 + 1] = 0;   // green
            pixels[index * 4 + 2] = 0;   // blue
            pixels[index * 4 + 3] = 255; // alpha
        }
    }
    glfwMakeContextCurrent(window);
    while (!glfwWindowShouldClose(window)) { // main loop
        glClearColor(1.0, 1.0, 1.0, 1.0); // clear screen to white
        glClear(GL_COLOR_BUFFER_BIT);
        glRasterPos2f(-1, -1); // set origin to bottom-left corner
        //glPixelZoom(2, 2); // with this call, the rendering is correct
        glDrawPixels(width, height, GL_RGBA, GL_UNSIGNED_BYTE, pixels); // draw pixels
        glfwSwapBuffers(window); // swap buffers
        glfwPollEvents(); // process events so the window can actually close
    }
    delete[] pixels;
    glfwTerminate();
    return 0;
}

I ran into this issue when using GLUT, and again while reimplementing my code with GLFW. It is caused by the Retina display's pixel density (as plenty of other sources have figured out), and the following ended up fixing it for me:
int width = 1360 * 2; // framebuffer width (2x for Retina)
int height = 768 * 2; // framebuffer height
GLFWwindow* window = glfwCreateWindow(width / 2, height / 2, "Hello OpenGL", NULL, NULL); // window size in screen coordinates
// ...
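Rather than hard-coding the 2x factor, a more portable variant (a sketch, not tested on every platform) asks GLFW for the actual framebuffer size after creating the window and sizes the pixel buffer from that, since the framebuffer may be larger than the window in screen coordinates on high-DPI displays:

int winWidth = 200, winHeight = 200; // window size in screen coordinates
GLFWwindow* window = glfwCreateWindow(winWidth, winHeight, "Hello OpenGL", NULL, NULL);
int fbWidth, fbHeight;
glfwGetFramebufferSize(window, &fbWidth, &fbHeight); // size in pixels (e.g. 2x on Retina)
unsigned char* pixels = new unsigned char[fbWidth * fbHeight * 4];
// ... fill pixels and call glDrawPixels(fbWidth, fbHeight, ...) as before ...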


SDL2 - Strange performance hit when rapidly changing colors

I was surprised when I found this out; initially I thought something was wrong with my algorithms, but on closer inspection I found that the more you change the colors, the bigger the impact on performance. Why is that?
Here's the full code:
#include <iostream>
#include <SDL2/SDL.h>

const int WIDTH = 1024;
const int HEIGHT = 768;

int main(int argc, char *argv[])
{
    SDL_Window *window;
    SDL_Renderer *renderer;
    SDL_Texture *texture;
    SDL_Event event;
    if (SDL_Init(SDL_INIT_VIDEO) < 0)
    {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't initialize SDL: %s", SDL_GetError());
        return 3;
    }
    window = SDL_CreateWindow("SDL_CreateTexture",
                              SDL_WINDOWPOS_UNDEFINED,
                              SDL_WINDOWPOS_UNDEFINED,
                              1024, 768,
                              SDL_WINDOW_RESIZABLE);
    renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED);
    texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_RGBA8888, SDL_TEXTUREACCESS_TARGET, WIDTH, HEIGHT);
    bool alive = true;
    while (alive)
    {
        while (SDL_PollEvent(&event) > 0)
        {
            switch (event.type)
            {
            case SDL_QUIT:
                alive = false;
                break;
            }
        }
        const Uint64 start = SDL_GetPerformanceCounter();
        SDL_SetRenderTarget(renderer, texture);
        SDL_SetRenderDrawColor(renderer, 0x00, 0x00, 0x00, 0x00);
        SDL_RenderClear(renderer);
        for (int i = 0; i < 10000; ++i)
        {
            SDL_SetRenderDrawColor(renderer, rand() % 255, rand() % 255, rand() % 255, 255);
            SDL_RenderDrawPoint(renderer, rand() % WIDTH, rand() % HEIGHT);
        }
        SDL_SetRenderTarget(renderer, NULL);
        SDL_RenderCopy(renderer, texture, NULL, NULL);
        SDL_RenderPresent(renderer);
        const Uint64 end = SDL_GetPerformanceCounter();
        const static Uint64 freq = SDL_GetPerformanceFrequency();
        const double seconds = (end - start) / static_cast<double>(freq);
        std::cout << "Frame time: " << seconds * 1000.0 << "ms" << std::endl;
    }
    SDL_DestroyRenderer(renderer);
    SDL_DestroyWindow(window);
    SDL_Quit();
    return 0;
}
The problem is this block of code:
for (int i = 0; i < 10000; ++i)
{
    SDL_SetRenderDrawColor(renderer, rand() % 255, rand() % 255, rand() % 255, 255);
    SDL_RenderDrawPoint(renderer, rand() % WIDTH, rand() % HEIGHT);
}
The measured frame times with that block were dramatically worse than with this version:
SDL_SetRenderDrawColor(renderer, 255, 255, 255, 255);
for (int i = 0; i < 10000; ++i)
{
    SDL_RenderDrawPoint(renderer, rand() % WIDTH, rand() % HEIGHT);
}
As the numbers show, there's quite a big performance impact when you change the colors a lot; in fact it gets over 100 times slower.
What am I doing wrong? Or is this how it's supposed to work?
Obviously the SDL_SetRenderDrawColor function takes some time to execute. The color data may even have to be sent to the GPU, which is very slow compared to regular memory access. The rand() calls also cost something, though far less.
Using your data, there is a difference of about 550 ms between one SDL_SetRenderDrawColor call and 10000 of them, so each call costs roughly 55 µs. That is tiny on its own, and calling it a few dozen times won't hurt performance, but 10000 calls per frame obviously will.
And if each call transmits 4 bytes to the GPU, you are already sending 40 kB per frame just for the colors.
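If fully random colors are not essential, one mitigation (a sketch, assuming a small fixed palette is acceptable for your use case) is to bucket the points by color and issue one batched SDL_RenderDrawPoints call per color, so the draw color changes only 4 times per frame instead of 10000:

#include <vector>
// Illustrative palette; group the 10000 random points by color so the
// renderer state changes only once per color instead of once per point.
const SDL_Color palette[4] = { {255, 0, 0, 255}, {0, 255, 0, 255},
                               {0, 0, 255, 255}, {255, 255, 0, 255} };
std::vector<SDL_Point> buckets[4];
for (int i = 0; i < 10000; ++i)
    buckets[rand() % 4].push_back(SDL_Point{ rand() % WIDTH, rand() % HEIGHT });
for (int c = 0; c < 4; ++c) {
    SDL_SetRenderDrawColor(renderer, palette[c].r, palette[c].g, palette[c].b, palette[c].a);
    SDL_RenderDrawPoints(renderer, buckets[c].data(), (int)buckets[c].size()); // one batched call per color
}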
I asked someone who knows SDL well (Gorbit99), and he told me the problem lies in combining textures with SDL_SetRenderDrawColor, which works on the GPU; per-pixel work like this is faster on the CPU, so instead of drawing into an SDL_Texture you should write into an SDL_Surface. This is my final code (performance ~2 ms):
SDL_Surface *surface = SDL_CreateRGBSurfaceWithFormat(0, WIDTH, HEIGHT, 32, SDL_PIXELFORMAT_ABGR32);
uint32_t *colors = (uint32_t*)surface->pixels;
for (unsigned int i = 0; i < 1000; i++)
{
    int x = rand() % WIDTH;
    int y = rand() % HEIGHT;
    int offset = (WIDTH * y) + x;
    colors[offset] = 0x00ff00ff; // 0xrrggbbaa (ABGR32 aliases RGBA8888 on little-endian)
}
SDL_Texture *texture = SDL_CreateTextureFromSurface(renderer, surface);
SDL_RenderCopy(renderer, texture, NULL, NULL);
SDL_RenderPresent(renderer);
SDL_DestroyTexture(texture);
SDL_FreeSurface(surface);
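For comparison, a similar per-pixel approach that stays on the texture path is a streaming texture: lock it, write raw pixels, unlock, and copy. A minimal sketch, assuming a texture created once with SDL_TEXTUREACCESS_STREAMING; note that SDL does not guarantee the locked buffer still contains the previous frame's pixels:

// Created once, outside the render loop:
SDL_Texture *stream = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_RGBA8888,
                                        SDL_TEXTUREACCESS_STREAMING, WIDTH, HEIGHT);
// Each frame:
void *raw;
int pitch;
if (SDL_LockTexture(stream, NULL, &raw, &pitch) == 0) {
    Uint32 *px = (Uint32 *)raw;
    for (int i = 0; i < 10000; ++i) {
        int x = rand() % WIDTH;
        int y = rand() % HEIGHT;
        px[y * (pitch / 4) + x] = 0xff0000ffu; // 0xRRGGBBAA: opaque red
    }
    SDL_UnlockTexture(stream);
}
SDL_RenderCopy(renderer, stream, NULL, NULL);
SDL_RenderPresent(renderer);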

GDI+ DrawImage: white color comes out black

I'm using GDI+ to scale an image from a buffer; the buffer holds a screenshot of my desktop.
Here is my code:
BYTE *Duplication::scaleBuffer(unsigned char *data, int width, int height)
{
    Gdiplus::Bitmap bitmap(width, height, 4 * width, PixelFormat32bppARGB, data);
    INT n_width = mWidthResolution;
    INT n_height = mHeightResolution;
    double ratio = ((double)width) / ((double)height);
    if (width > height) {
        n_height = (double)n_width / ratio;
    }
    else {
        n_width = n_height * ratio;
    }
    Gdiplus::Bitmap newBitmap(n_width, n_height, bitmap.GetPixelFormat());
    Gdiplus::Graphics graphics(&newBitmap);
    graphics.SetInterpolationMode(Gdiplus::InterpolationModeBilinear);
    graphics.DrawImage(&bitmap, 0, 0, n_width, n_height);
    Gdiplus::Rect rect(0, 0, newBitmap.GetWidth(), newBitmap.GetHeight());
    Gdiplus::BitmapData bitmapData;
    BYTE *buffer = nullptr;
    if (Gdiplus::Ok == newBitmap.LockBits(&rect, Gdiplus::ImageLockModeRead, PixelFormat32bppARGB, &bitmapData)) {
        int len = bitmapData.Height * std::abs(bitmapData.Stride);
        buffer = new BYTE[len];
        RtlZeroMemory(buffer, len);
        memcpy(buffer, bitmapData.Scan0, len);
        newBitmap.UnlockBits(&bitmapData);
    }
    return buffer;
}
On windows with a white background, I get a black background and corrupted colors. It only happens with some programs, like "File Browser". I don't understand why.
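A plausible cause, assuming the buffer comes from a screen-capture API such as DXGI desktop duplication: those APIs fill BGRA buffers whose alpha channel is often not meaningful (frequently 0 for DWM-composed windows, which would explain why only some programs are affected). With PixelFormat32bppARGB, DrawImage honors that alpha and blends those pixels toward black. A sketch of the usual workaround is to tell GDI+ to ignore the alpha byte when wrapping the source buffer:

// Hypothetical fix sketch: treat the source as 32bpp RGB so the (meaningless)
// alpha byte is ignored instead of being alpha-blended to black.
Gdiplus::Bitmap bitmap(width, height, 4 * width, PixelFormat32bppRGB, data);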

Trouble getting 8-bit palette output working with SDL 2.0

Here's my code, using SDL 2.0.4 on OSX 10.11.4:
SDL_Surface *output_surface = SDL_CreateRGBSurface(0, width, height, 8, 0, 0, 0, 0);
SDL_Texture *output_texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_RGB24, SDL_TEXTUREACCESS_STREAMING, width, height);
SDL_Color c[256];
// Setting each color to red as a test.
// (Initialize all 256 entries, since 256 colors are passed below.)
for (int i = 0; i < 256; i++) {
    c[i].r = 255;
    c[i].g = 0;
    c[i].b = 0;
    c[i].a = 255;
}
SDL_SetPaletteColors(output_surface->format->palette, c, 0, 256);
Then later...
SDL_Rect r = {
    .x = 0,
    .y = 0,
    .w = width,
    .h = height
};
// Doesn't fill with red.
SDL_FillRect(output_surface, &r, 4);
SDL_UpdateTexture(output_texture, NULL, output_surface->pixels, output_surface->pitch);
SDL_RenderClear(renderer);
SDL_RenderCopy(renderer, output_texture, NULL, NULL);
SDL_RenderPresent(renderer);
What I would expect to see is the whole window red, but I'm getting something entirely different. Changing the color number passed to SDL_FillRect shows that I'm getting a grayscale palette (0 is black, 255 is white), even though SDL_SetPaletteColors doesn't return an error and I've looped through output_surface->format->palette->colors to verify the palette has been changed.
What am I missing here?
Edit: I was asked to post an entire program. Here it is:
#include <SDL2/SDL.h>

int main(int argc, const char *argv[]) {
    SDL_Window *window = NULL;
    SDL_Renderer *renderer = NULL;
    SDL_Surface *output_surface = NULL;
    SDL_Texture *output_texture = NULL;
    int width = 640;
    int height = 480;
    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER) < 0) return 0;
    window = SDL_CreateWindow("Sample", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, width, height, 0);
    renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_PRESENTVSYNC | SDL_RENDERER_ACCELERATED);
    output_surface = SDL_CreateRGBSurface(0, width, height, 8, 0, 0, 0, 0);
    output_texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_RGB24, SDL_TEXTUREACCESS_STREAMING, width, height);
    SDL_Color c[256];
    for (Uint8 i = 255; i--;) {
        c[i].r = 255;
        c[i].g = 0;
        c[i].b = 0;
        c[i].a = 255;
    }
    SDL_SetPaletteColors(output_surface->format->palette, c, 0, 255);
    SDL_Rect r = {
        .x = 0,
        .y = 0,
        .w = width,
        .h = height
    };
    bool running = true;
    while (running) {
        SDL_Event event;
        while (SDL_PollEvent(&event)) {
            switch (event.type) {
            case SDL_KEYDOWN:
                running = false;
                break;
            }
        }
        SDL_FillRect(output_surface, &r, 124);
        SDL_UpdateTexture(output_texture, NULL, output_surface->pixels, output_surface->pitch);
        SDL_RenderClear(renderer);
        SDL_RenderCopy(renderer, output_texture, NULL, NULL);
        SDL_RenderPresent(renderer);
    }
    SDL_FreeSurface(output_surface);
    SDL_DestroyTexture(output_texture);
    SDL_DestroyRenderer(renderer);
    SDL_DestroyWindow(window);
    SDL_Quit();
    return 0;
}
Passing 0 to SDL_FillRect gives black, 255 gives white, and anything in between is a shade of grey.
Alright, found the solution. The original code uploads the surface's 8-bit palette indices straight into a texture that expects RGB24 pixels, so each index byte is read as raw color data; filling with 124 therefore shows up as the gray (124, 124, 124), which explains the grayscale ramp.
Remove this line:
output_texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_RGB24, SDL_TEXTUREACCESS_STREAMING, width, height);
And instead add this line somewhere after the call to SDL_SetPaletteColors, or after you change the surface's pixels (like in the game loop); SDL_CreateTextureFromSurface applies the palette while converting the surface:
output_texture = SDL_CreateTextureFromSurface(renderer, output_surface);
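Note that creating a texture inside the render loop also means destroying it each frame with SDL_DestroyTexture, or it will leak. An alternative sketch that keeps the original streaming RGB24 texture is to let SDL apply the palette by converting the surface, then upload the converted pixels:

// Convert the 8-bit paletted surface to RGB24; SDL applies the palette
// during the conversion, so the texture receives real color data.
SDL_Surface *rgb = SDL_ConvertSurfaceFormat(output_surface, SDL_PIXELFORMAT_RGB24, 0);
SDL_UpdateTexture(output_texture, NULL, rgb->pixels, rgb->pitch);
SDL_FreeSurface(rgb);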

Confused about SDL_CreateRGBSurface() width and height parameters

So, after fooling around with the width and height parameters of the SDL_CreateRGBSurface() function, I'm really confused about how they work. According to the SDL wiki, they are the width and height of the surface, yet when I pass SCREENWIDTH / 2 or SCREENHEIGHT / 2, the content shows up bigger than without the division. This is the code I was messing around with:
#include <iostream>
#include <SDL.h>

const int WIN_WIDTH = 640;
const int WIN_HEIGHT = 480;

int main(int argc, char **argv){
    if (SDL_Init(SDL_INIT_EVERYTHING) != 0){
        std::cerr << "SDL_Init failed: " << SDL_GetError() << "\n";
        return 1;
    }
    SDL_Window *win = SDL_CreateWindow("Rendering to a texture!", SDL_WINDOWPOS_CENTERED,
                                       SDL_WINDOWPOS_CENTERED, WIN_WIDTH, WIN_HEIGHT, 0);
    SDL_Renderer *renderer = SDL_CreateRenderer(win, -1,
                                                SDL_RENDERER_ACCELERATED | SDL_RENDERER_TARGETTEXTURE);
    SDL_Surface* s;
    SDL_Color c = { 155, 0, 0 };
    SDL_Rect r = { 0, 0, 100, 100 };
    s = SDL_CreateRGBSurface(0, WIN_WIDTH / 2, WIN_HEIGHT, 32, 0, 0, 0, 0);
    SDL_FillRect(s, &r, SDL_MapRGB(s->format, c.r, c.g, c.b));
    SDL_Texture* t;
    t = SDL_CreateTextureFromSurface(renderer, s);
    SDL_SetRenderDrawColor(renderer, 0xFF, 0xFF, 0xFF, 0xFF);
    SDL_RenderClear(renderer);
    SDL_RenderCopy(renderer, t, NULL, NULL);
    SDL_RenderPresent(renderer);
    SDL_Delay(2000);
    SDL_FreeSurface(s);
    SDL_DestroyTexture(t);
    SDL_DestroyRenderer(renderer);
    SDL_DestroyWindow(win);
    SDL_Quit();
    return 0;
}
All I really want is a rect with a width of 100 and a height of 100, but to have it appear at the right size on screen, the surface's width and height must match the window's. Why is that?
SDL_RenderCopy with a NULL destination rectangle stretches the source texture to fill the rendering target, so it stretches your surface over the entire window. If you want to draw a 100x100 pixel rectangle, your SDL surface should be the same size as the window you're copying it to; that way no stretching takes place and the surface is presented 1:1.
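Alternatively, you can keep the smaller surface and pass an explicit destination rectangle, so SDL_RenderCopy blits the texture at its native size instead of stretching it over the window. A minimal sketch using the variables from the code above:

// Present the texture 1:1 at the top-left corner instead of stretching it.
SDL_Rect dst = { 0, 0, s->w, s->h }; // destination matches the surface size
SDL_RenderCopy(renderer, t, NULL, &dst);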

SDL 2 program uses 1.4 GB of memory?

Okay, so I've been working on this little bouncing DVD logo thingy, and I'm running into it slowly taking up more and more memory. Eventually it ends up using a whopping 1.4 GB, then slows down and crashes. Here is the code; what is wrong with it that causes this?
#include <iostream>
#include <stdlib.h>
#include <time.h>
#include <SDL2/SDL.h>
#include <SDL2_ttf/SDL_ttf.h>
#include <SDL2_image/SDL_image.h>

// This sets up the display.
SDL_Window* window = SDL_CreateWindow("DVD Thingy", 100, 100,
                                      800, 600, SDL_WINDOW_SHOWN
                                      | SDL_RENDERER_ACCELERATED
                                      | SDL_RENDERER_PRESENTVSYNC);
SDL_Renderer* screen = SDL_CreateRenderer(window, -1, 0);

void drawText(char text[], int origX, int origY, SDL_Renderer* ren, TTF_Font* font, SDL_Color color) {
    SDL_Surface* surfaceMessage = TTF_RenderText_Blended(font, text, color);
    SDL_Texture* Message = SDL_CreateTextureFromSurface(ren, surfaceMessage);
    int w = surfaceMessage->w;
    int h = surfaceMessage->h;
    SDL_Rect messageRect = {origX, origY, w, h};
    SDL_RenderCopy(ren, Message, NULL, &messageRect);
    SDL_DestroyTexture(Message);
}

int main() {
    // This initializes the font class.
    srand(time(NULL));
    TTF_Init();
    int skyboxColor = 240;
    bool done = false;
    int dirX = 1, dirY = 1;
    TTF_Font* font = TTF_OpenFont("./Impact.ttf", 18);
    TTF_SetFontOutline(font, 1);
    int dvdX = rand() % 800, dvdY = rand() % 600 - 20;
    SDL_Color white = {255, 255, 255};
    SDL_Event event;
    while (!done) {
        while (SDL_PollEvent(&event)) {
            switch (event.type) {
            case SDL_QUIT:
                SDL_Quit();
                return 0;
            default:
                break;
            }
        }
        dvdX += dirX;
        dvdY += dirY;
        if (dvdX > 770) {
            dirX = -1;
        }
        if (dvdX < 0) {
            dirX = 1;
        }
        if (dvdY < -3) {
            dirY = 1;
        }
        if (dvdY > 580) {
            dirY = -1;
        }
        SDL_SetRenderDrawColor(screen, 0, 0, 0, 255);
        SDL_RenderClear(screen);
        drawText("DVD", dvdX, dvdY, screen, font, white);
        SDL_RenderPresent(screen);
        SDL_Delay(1 / 1000 * 60);
    }
    return 0;
}
It would appear that in the drawText() function you are creating a new SDL_Surface with each call to TTF_RenderText_Blended().
You must free this surface when you are finished with it, which here means at the end of the function it is created in. You already destroy the texture you create from the surface, so all you need to add is one line after that:
SDL_DestroyTexture(Message);
SDL_FreeSurface(surfaceMessage); // <- free the surface
Because drawText() was called on every iteration of the main loop, it was bloating memory with leaked SDL_Surfaces.
Just one other point: since you never change the text from "DVD", you could create the texture once and then just draw it wherever you need it, as sketched below. That would be much more efficient than creating, drawing, and destroying it every single frame.
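A sketch of that idea, reusing the variables from the program above (names of the new ones are illustrative): render the text once before the loop, cache the texture, and only update its position each frame:

// Before the main loop: render "DVD" once and keep the texture.
SDL_Surface* surf = TTF_RenderText_Blended(font, "DVD", white);
SDL_Texture* dvdTex = SDL_CreateTextureFromSurface(screen, surf);
SDL_Rect dvdRect = { dvdX, dvdY, surf->w, surf->h };
SDL_FreeSurface(surf); // the surface is no longer needed once the texture exists

// Inside the loop: move the cached texture instead of re-rendering the text.
dvdRect.x = dvdX;
dvdRect.y = dvdY;
SDL_RenderCopy(screen, dvdTex, NULL, &dvdRect);

// After the loop:
SDL_DestroyTexture(dvdTex);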