Drawing sprite on top of a Tile Map - c++

So I have a 2D array which acts as a map for my tiles to be drawn.
int sMap[12][20] = {
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
{1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1},
{1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1},
{1, 0, 1, 0, 1, 2, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1},
{1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1},
{1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1},
{1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1},
{1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 2, 0, 0, 1, 0, 0, 0, 1},
{1, 1, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 1, 0, 1, 1, 2, 2, 2, 1},
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1},
{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1},
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
};
Once my tiles have been loaded in, I use this function() to place the tiles:
// Walk the 12x20 map row by row: bind a texture per cell value
// (1 = brick1, 2 = brick2, anything else = wall), then emit one
// unit-sized quad for that cell.
for (int y = 0; y < 12; y++){
for (int x = 0; x < 20; x++){
if (sMap[y][x] == 1)
glBindTexture( GL_TEXTURE_2D, brick1);
else if (sMap[y][x] == 2)
glBindTexture( GL_TEXTURE_2D, brick2);
else
glBindTexture( GL_TEXTURE_2D, wall );
// MAP_SIZEY - (...) flips the row index so array row 0 is drawn at the
// top, compensating for OpenGL's bottom-left origin. offsetx/offsety
// shift the whole map; each quad spans exactly 1 unit per tile.
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f); glVertex3f(float(x + offsetx), float(MAP_SIZEY - (y + offsety)), 0.0f);
glTexCoord2f(1.0f, 0.0f); glVertex3f(float(x + 1 + offsetx), float(MAP_SIZEY - (y + offsety)), 0.0f);
glTexCoord2f(1.0f, 1.0f); glVertex3f(float(x + 1 + offsetx), float(MAP_SIZEY - (y + 1 + offsety)), 0.0f);
glTexCoord2f(0.0f, 1.0f); glVertex3f(float(x + offsetx), float(MAP_SIZEY - (y + 1 + offsety)), 0.0f);
glEnd();
}
}
I think I may have confused myself with the coordinate system of the tiles because when I draw a basic OpenGL square which acts as a sprite, I just get a black screen upon running the program.
I'm unsure whether this means the scale of the sprite relative to the tiles is wrong, or whether the sprite and tiles are on different Z planes...
I would appreciate it if someone could explain the coordinate system in case I don't understand it as well as I thought, and also advise me how to draw an OpenGL square at the same coordinates.
Currently I have this to draw my basic sprite:
struct RECT{float x, y, w, h;};
RECT sprite = {0, 0, 10, 10};
void drawSprite (RECT rect){
glBegin(GL_QUADS);
glColor3f(1.0f, 0.0f, 0.0f);
glVertex3f(rect.x, rect.y, 0.0);
glVertex3f(rect.x, rect.y+rect.h, 0.0);
glVertex3f(rect.x+rect.w, rect.y+rect.h, 0.0);
glVertex3f(rect.x+rect.w, rect.y, 0.0);
glEnd();
}
EDIT:
resize screen:
glViewport(0,0,width,height);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(45.0f,(GLfloat)width/(GLfloat)height,0.1f,20.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
draw scene:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
gluLookAt(10.0f, 8.0f, 20.0f, 10.0f, 8.0f, 0.0f, 0.0f, 1.0f, 0.0f);
glTranslatef(5.0f,4.0f,0.0f);
draw_tiles();
draw_sprite();

In the draw_tiles function it looks like you might be passing incorrect coordinates - maybe you should be multiplying the x and y values by your tile size.
Also try turning off depth testing and backface culling to help resolve your black screen problem.
glDisable( GL_DEPTH_TEST );
glDisable( GL_CULL_FACE );

Related

C++ OpenGL I can't see a cube on the screen

I can't see a cube on the screen.I located it with glTranslatef. I got it to stop somewhere with LoadIdentity. I wanted to get a camera by changing the Matrix Modes, but the cube does not appear.
Code:
#include <Windows.h>
#include <gl/GL.h>
#include <gl/GLU.h>
#include <GLFW/glfw3.h>
#include <cstdio>
int width = 1280;
int height = 720;
GLfloat vertices[] = {
-1, -1, -1, -1, -1, 1, -1, 1, 1, -1, 1, -1,
1, -1, -1, 1, -1, 1, 1, 1, 1, 1, 1, -1,
-1, -1, -1, -1, -1, 1, 1, -1, 1, 1, -1, -1,
-1, 1, -1, -1, 1, 1, 1, 1, 1, 1, 1, -1,
-1, -1, -1, -1, 1, -1, 1, 1, -1, 1, -1, -1,
-1, -1, 1, -1, 1, 1, 1, 1, 1, 1, -1, 1
};
GLfloat colors[] =
{
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0
};
// GLFW key callback: a press of the Escape key requests window close.
void keyboard(GLFWwindow* window, int key, int scancode, int action, int mods) {
    if (action == GLFW_PRESS && key == GLFW_KEY_ESCAPE) {
        glfwSetWindowShouldClose(window, GL_TRUE);
    }
}
// Renders the cube from the global `vertices`/`colors` arrays using the
// legacy client-side vertex-array API (24 vertices = 6 quad faces).
void drawCube() {
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_COLOR_ARRAY);
glVertexPointer(3, GL_FLOAT, 0, vertices); // 3 floats per position, tightly packed
glColorPointer(3, GL_FLOAT, 0, colors); // RGB per vertex
glDrawArrays(GL_QUADS, 0, 24); // 6 faces x 4 vertices each
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_COLOR_ARRAY);
}
int main(void)
{
GLFWwindow* window;
/* Initialize the library */
if (!glfwInit())
return -1;
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow(width, height, "C++ OpenGL Test Area", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
glfwSetKeyCallback(window, keyboard);
glEnable(GL_DEPTH_TEST);
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window))
{
glViewport(0, 0, width, height);
/* Render here */
glClearColor(0.0, 192/256, 1, 1.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluLookAt(1, 1, 1, 0, 0, 0, 0, 1, 0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glTranslatef(0, 0, 5);
drawCube();
glFlush();
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glfwTerminate();
return 0;
}
I thought it was because I couldn't draw the cube. I checked the codes again, but there is no problem drawing the cube.
The cube is not in the viewing volume. If you do not specify a projection matrix, the viewing volume is a unique cube around the camera position, aligned along the line of sight. Any geometry that is not in the viewing volume is clipped. You must set up a projection matrix. The projection matrix defines the volume in the scene that is projected onto the viewport. An orthographic projection matrix can be set with glOrtho. A perspective projection matrix can be set with glFrustum or gluPerspective. The perspective projection matrix defines a Viewing frustum. e.g.:
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
float aspect = (float)width / (float)height;
gluPerspective(90.0, aspect, 0.1, 10.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(4, 4, 4, 0, 0, 0, 0, 1, 0);
drawCube();

OpenGL: How to resolve the lighting problem of Cube?

// Vertices information
GLfloat vertices[] = { 1, 1, 1, -1, 1, 1, -1,-1, 1, 1,-1, 1, // (front)
1, 1, 1, 1,-1, 1, 1,-1,-1, 1, 1,-1, // (right)
1, 1, 1, 1, 1,-1, -1, 1,-1, -1, 1, 1, // (top)
-1, 1, 1, -1, 1,-1, -1,-1,-1, -1,-1, 1, // (left)
-1,-1,-1, 1,-1,-1, 1,-1, 1, -1,-1, 1, // (bottom)
1,-1,-1, -1,-1,-1, -1, 1,-1, 1, 1,-1 }; // (back)
// Normal information
GLfloat normals[72] = { 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, // (front)
1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, // (right)
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, // (top)
-1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, // (left)
0,-1, 0, 0,-1, 0, 0,-1, 0, 0,-1, 0, // (bottom)
0, 0,-1, 0, 0,-1, 0, 0,-1, 0, 0,-1 }; // (back)
// drawing cube
glEnableClientState(GL_NORMAL_ARRAY);
glEnableClientState(GL_VERTEX_ARRAY);
glNormalPointer(GL_FLOAT, 0, normals);
glVertexPointer(3, GL_FLOAT, 0, vertices);
glDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_BYTE, indices);
glPopMatrix();
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
I want to make sure there is no color difference between the two sides.
Is there a solution for this?
The issue is caused by the fact that you've activated the flat shading mode (see Detecting legacy OpenGL and glShadeModel).
Thus the light is computed for the vertices of the triangle primitive, but the color of the triangle is determined by the Provoking vertex.
Switch to the smooth (GL_SMOOTH) shading model and ensure that the light is diffuse (GL_DIFFUSE parameter - see glLight):
glShadeModel(GL_SMOOTH);
See the difference between shading mode GL_FLAT (left) and GL_SMOOTH (right):
Another option would be to render GL_QUADS primitives instead of GL_TRIANGLES primitives:
glDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_INT, indices)
glDrawArrays(GL_QUADS, 0, 24);
Again, GL_FLAT (left) and GL_SMOOTH (right):

C++ make function return array cross file

I'm looking to make a function return an array of set size, and assign the value of that array to another array. The function is located in one file and the array I will be assigning it's return value to is located in another.
I'm attempting to do this in an SFML project. Whenever I call the array function, my program freezes and stops responding.
generate_maze.cpp:
#include <SFML/Graphics.hpp>
#include "generate_maze.h"
/*
 * Returns the 13x13 maze layout as a flat array of 169 cells
 * (0 = wall, 1 = corridor), indexed row-major as maze[row*13 + col].
 *
 * Fix: the original returned the address of an automatic local array,
 * which is destroyed when the function returns -- the caller then read
 * a dangling pointer (undefined behavior; the observed freeze).
 * Declaring the array `static` gives it program lifetime, so the
 * returned pointer stays valid. Note: every call returns the SAME
 * shared buffer; the caller must not free() it.
 */
char* generate_maze() {
    static char test_maze[169] = {
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0,
        0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0,
        0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0,
        0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0,
        0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0,
        0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0,
        0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0,
        0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0,
        0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0,
        0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0,
        0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
    };
    return test_maze;
}
extract of main.cpp:
#include <stdlib.h>
#include <stdio.h>
#include <time.h>
#include <SFML/Graphics.hpp>
#include "controls.h"
#include "generate_maze.h"
...
char* maze = generate_maze();
std::cout << (int) maze[0] << std::endl;
generate_maze.h:
char* generate_maze();
Thanks in advance for the help.
The code does not work because generate_maze() returns a pointer to a local variable, which is destroyed on exiting the function. To make it work one could design generate_maze() to allocate an array of char via new, then the allocated array survives after generate_maze() is finished. But that would also require to explicitly free the memory, after char *maze is no longer used.
A much better c++-like solution is the following.
If the size of the array is fixed, you should better use std::array to store the result of generate_maze(), as:
#include <array>
// Returns the fixed-size maze BY VALUE. std::array is copied/moved out
// of the function, so -- unlike returning a pointer to a local buffer --
// there is no dangling reference and no manual memory management.
std::array<char, 169> generate_maze() {
return { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0,
0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0,
0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0,
0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0,
0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0,
0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0,
0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0,
0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0,
0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0,
0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
}
If the size of the array returned by generate_maze() is not fixed, the correct way is to use std::vector and declare generate_maze() as
#include <vector>
std::vector<char> generate_maze();
generate_maze returns the address of test_maze[0], which is a local variable. You should never ever return addresses of (or references to) local variables from a function.
Allocate the maze on the heap instead and return that:
// Heap-allocating variant: copies the local initializer into a
// new[]-allocated buffer whose lifetime extends beyond the function.
// NOTE(review): memcpy requires <cstring>/<string.h>. Ownership of the
// returned buffer transfers to the caller, who must release it with
// delete[] (not free()).
char* generate_maze() {
char local[] = {
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0,
0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0,
0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0,
0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0,
0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0,
0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0,
0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0,
0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0,
0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0,
0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0,
0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
char* ret = new char[sizeof local];
memcpy(ret, local, sizeof local);
return ret;
}
Alternatively, just return a std::vector which does its own memory management.
std::vector<char> generate_maze() {
return {0, 0, 0, ...};
}
generate_maze() returns pointer to local array, which is destroyed when function exits.
To solve this problem, you can define test_maze static: static char test_maze[].
This will create one permanent array shared by all generate_maze() function calls. If you're going to implement generate_maze() so that it returns different maze arrays, better use std::vector or std::array.

Nested glCallList in OpenGL c++

I am currently trying to program Pacman in C++ using OpenGL. I am currently trying to create a display list to draw the maze. My thoughts were to create a square tile and then simply display said tile if matrix indicates a wall segment at some position. My code for this is:
const unsigned int FREE = 0;
const unsigned int WALL = 1;
const unsigned int GHOST_WALL = 2;
const unsigned int matrix[22][19] = {
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
{1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1},
{1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1},
{1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1},
{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1},
{1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1},
{1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1},
{1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1},
{1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1},
{1, 1, 1, 1, 0, 1, 0, 1, 1, 2, 1, 1, 0, 1, 0, 1, 1, 1, 1},
{0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0},
{1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1},
{1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1},
{1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1},
{1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1},
{1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1},
{1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1},
{1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1},
{1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1},
{1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1},
{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1},
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}
};
GLuint tile_handle = 0;
GLuint maze_handle = 0;
/*
* Initialize maze by precompiling its components
*/
/*
 * Initialize maze by precompiling its components into display lists:
 * a unit tile quad, then the whole maze built from calls to that tile.
 *
 * Fix: the outer loop used `row--`, so row ran 0, -1, -2, ... forever
 * and `matrix[21-row]` indexed ever further past the end of the 22-row
 * array -- that out-of-bounds read is what produced "Bus error: 10".
 * Nested glCallList inside a compiled list is legal; the loop was the
 * problem, not the list nesting.
 */
void maze_init() {
    // request a single display list handle for the tile
    tile_handle = glGenLists(1);
    glNewList(tile_handle, GL_COMPILE);
    glColor3f(0, 0, 0.7f); // Blue
    glBegin(GL_QUADS);
    glVertex2i(0,0);
    glVertex2i(1,0);
    glVertex2i(1,1);
    glVertex2i(0,1);
    glEnd();
    glEndList();
    // request a single display list handle for the entire maze
    maze_handle = glGenLists(1);
    glNewList(maze_handle, GL_COMPILE);
    glColor3f(0, 0, 0.7f); // Blue
    for(int row = 0; row < 22; row++) {   // fix: was `row--` (infinite loop / OOB read)
        for(int col = 0; col < 19; col++) {
            if(matrix[21-row][col] == WALL) { // 21-row flips the map vertically for GL's bottom-left origin
                glPushMatrix();
                glScaled(30, 30, 1);          // tile size 30: scaling first makes col/row tile-unit offsets
                glTranslated(col, row, 0);
                glCallList(tile_handle);
                glPopMatrix();
            }
        }
    }
    glEndList();
}
I then use maze_init() in my main initialisation method and use the call glCallList(maze_handle); in my main display function. This compiles fine but when I try to run it, it gives me a Bus error: 10 error.
Now I tried the same without the loops in the definition of glNewList(maze_handle, GL_COMPILE); and it ran fine. My question is therefore this: why do the loops (or the conditional statement) inside the call list create a bus error?
I tried a simple loop inside glNewList(maze_handle, GL_COMPILE); that made calls to DISTINCT display lists and it ran fine. So is the problem here that I make a repeated call to the same display list? For some reason I just can't seem to find the problem here ...

C++ 2D Array Map for Tiling Output Inverted

I have created a 2D array that represents a map for my tiles to be placed:
int sMap[12][20] = {
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
{1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1},
{1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1},
{1, 0, 1, 0, 1, 2, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1},
{1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1},
{1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1},
{1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1},
{1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 2, 0, 0, 1, 0, 0, 0, 1},
{1, 1, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 1, 0, 1, 1, 2, 2, 2, 1},
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1},
{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1},
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
};
Once my tiles have been loaded in, I use this function() to place the tiles:
// Draws each cell at raw (x, y). Because OpenGL's origin is at the
// bottom-left, array row 0 lands at the BOTTOM of the screen -- this is
// the vertical inversion the question describes. (The commented-out
// reversed loops show the fix: iterate y from 11 down to 0.)
for (int y = 0; y < 12; y++){ //for (int y = 11; y >= 0; y--){
for (int x = 0; x < 20; x++){ //for (int x = 19; x >= 0; x--){
if (sMap[y][x] == 1)
glBindTexture( GL_TEXTURE_2D, brick1);
else if (sMap[y][x] == 2)
glBindTexture( GL_TEXTURE_2D, brick2);
else
glBindTexture( GL_TEXTURE_2D, wall );
// One unit quad per cell, texture mapped corner-to-corner.
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f); glVertex3f(float(x), float(y), 0.0f);
glTexCoord2f(1.0f, 0.0f); glVertex3f(float(x + 1), float(y), 0.0f);
glTexCoord2f(1.0f, 1.0f); glVertex3f(float(x + 1), float(y + 1), 0.0f);
glTexCoord2f(0.0f, 1.0f); glVertex3f(float(x), float(y + 1), 0.0f);
glEnd();
}
}
For some reason, my Map is inverted... (So the bottom line of digits in the map above is the top line of tiles on screen and the second line up on the map is the second line of tiles down on screen).
Its my understanding, you iterate through selecting each digit and assign the correct tile. I've tried changing the x/y's but that rotates the map on screen so its 12 tiles on the x and 20 tiles on the y.
I want my tiles on screen to represent the map above. Any ideas how to fix this? Thanks.
Problem is in the fact that OpenGL actually has the coordinate system that point (0, 0) is in the lower-left part of the viewport. So if you want to represent that matrix in the code the same way as it should draw, you need to calculate y coordinate for drawing as <height of viewport> - (calculated y). How to get that height - it's up to you, probably you defined it earlier in the code.
For instance, float(y + 1 + offsety) will become float(HEIGHT - (y + 1 + offsety)).
EDIT
Oh, I just realized - you have that loop, so, you can put
for (int y = 11; y >= 0; y--){
instead existing start of outer loop and that's it. But, anyway, it's good to know that it is NOT accidentally inverted as you thought - it's just up to coordinate system.