OpenGL, some normals reversed despite counterclockwise order - c++

In the code below, I don't understand why some faces have their normals reversed.
The triangles look wound in the anti-clockwise direction, but some faces remain black.
When I modify the fragment shader to color = -vnormal; the two black faces are rendered correctly, but obviously not the others.
Thanks for any help given
// minimalist but functional code using glew, glfw, glm
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <glm/vec3.hpp>
#include <glm/vec4.hpp>
#include <glm/mat4x4.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
#include "shaders.h"
GLuint myVAO;
void createCube() {
    //    v6----- v5
    //   /|      /|
    //  v1------v0|
    //  | |     | |
    //  | |v7---|-|v4
    //  |/      |/
    //  v2------v3
    const GLfloat cube_vertices[] = {
         1, 1, 1,  -1, 1, 1,  -1,-1, 1,   // v0-v1-v2 (front)
        -1,-1, 1,   1,-1, 1,   1, 1, 1,   // v2-v3-v0
         1, 1, 1,   1,-1, 1,   1,-1,-1,   // v0-v3-v4 (right)
         1,-1,-1,   1, 1,-1,   1, 1, 1,   // v4-v5-v0
         1, 1, 1,   1, 1,-1,  -1, 1,-1,   // v0-v5-v6 (top)
        -1, 1,-1,  -1, 1, 1,   1, 1, 1,   // v6-v1-v0
        -1, 1, 1,  -1, 1,-1,  -1,-1,-1,   // v1-v6-v7 (left)
        -1,-1,-1,  -1,-1, 1,  -1, 1, 1,   // v7-v2-v1
        -1,-1,-1,   1,-1,-1,   1,-1, 1,   // v7-v4-v3 (bottom)
         1,-1, 1,  -1,-1, 1,  -1,-1,-1,   // v3-v2-v7
         1,-1,-1,  -1,-1,-1,  -1, 1,-1,   // v4-v7-v6 (back)
        -1, 1,-1,   1, 1,-1,   1,-1,-1 }; // v6-v5-v4
    // normal array
    const GLfloat cube_normalsI[] = {
         0, 0, 1,   0, 0, 1,   0, 0, 1,   // v0-v1-v2 (front)
         0, 0, 1,   0, 0, 1,   0, 0, 1,   // v2-v3-v0
         1, 0, 0,   1, 0, 0,   1, 0, 0,   // v0-v3-v4 (right)
         1, 0, 0,   1, 0, 0,   1, 0, 0,   // v4-v5-v0
         0, 1, 0,   0, 1, 0,   0, 1, 0,   // v0-v5-v6 (top)
         0, 1, 0,   0, 1, 0,   0, 1, 0,   // v6-v1-v0
        -1, 0, 0,  -1, 0, 0,  -1, 0, 0,   // v1-v6-v7 (left)
        -1, 0, 0,  -1, 0, 0,  -1, 0, 0,   // v7-v2-v1
         0,-1, 0,   0,-1, 0,   0,-1, 0,   // v7-v4-v3 (bottom)
         0,-1, 0,   0,-1, 0,   0,-1, 0,   // v3-v2-v7
         0, 0,-1,   0, 0,-1,   0, 0,-1,   // v4-v7-v6 (back)
         0, 0,-1,   0, 0,-1,   0, 0,-1 }; // v6-v5-v4
    // Upload per-vertex positions
    GLuint positionVBO = 0;
    glGenBuffers(1, &positionVBO);
    glBindBuffer(GL_ARRAY_BUFFER, positionVBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(cube_vertices), cube_vertices, GL_STATIC_DRAW); // sizeof(array) is already in bytes; don't multiply by sizeof(GLfloat)
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    // Upload per-vertex normals
    GLuint normalVBO = 0;
    glGenBuffers(1, &normalVBO);
    glBindBuffer(GL_ARRAY_BUFFER, normalVBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(cube_normalsI), cube_normalsI, GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    // Hook up vertex/normals buffers to a "vertex array object" (VAO)
    glGenVertexArrays(1, &myVAO);
    glBindVertexArray(myVAO);
    // Attach position buffer as attribute 0
    glBindBuffer(GL_ARRAY_BUFFER, positionVBO);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 3, 0);
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    // Attach normal buffer as attribute 1
    glBindBuffer(GL_ARRAY_BUFFER, normalVBO);
    glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 3, 0);
    glEnableVertexAttribArray(1);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindVertexArray(0);
}
int main(int argc, char** argv) {
    glfwInit();
    GLFWwindow* window = glfwCreateWindow(768, 768, "", NULL, NULL);
    glfwMakeContextCurrent(window);
    glewInit();
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE); // same problem with glEnable(GL_FRONT_AND_BACK);
    glClearColor(0.8f, 0.7f, 0.5f, 1.0f);
    unsigned int program = shaders::CreateShader("simple.vert", "simple.frag");
    createCube();
    while (glfwWindowShouldClose(window) == GL_FALSE) {
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glm::mat4 model = glm::translate(glm::mat4(1.0f), glm::vec3(0.0, 0.0, -4.0));
        glm::mat4 view = glm::lookAt(glm::vec3(-2.0, -2.0, 0.0), glm::vec3(0.0, 0.0, -4.0), glm::vec3(0.0, 1.0, 0.0));
        glm::mat4 projection = glm::perspective(glm::radians(45.0f), 1.0f, 0.1f, 10.0f); // recent GLM expects the FOV in radians
        glm::mat4 mvp = projection * view * model;
        glUseProgram(program);
        GLuint imvp = glGetUniformLocation(program, "mvp");
        glUniformMatrix4fv(imvp, 1, false, glm::value_ptr(mvp));
        glBindVertexArray(myVAO);
        glDrawArrays(GL_TRIANGLES, 0, 36);
        glBindVertexArray(0);
        glUseProgram(0);
        glfwSwapBuffers(window);
        glfwPollEvents(); // without polling, the window never processes its close event
    }
}
The vertex shader:
#version 330 core
layout (location = 0) in vec3 in_position;
layout (location = 1) in vec3 in_normal;
uniform mat4 mvp;
out vec3 vnormal;
void main() {
    vnormal = in_normal;
    gl_Position = mvp * vec4(in_position, 1.0);
}
The fragment shader:
#version 330 core
in vec3 vnormal;
out vec3 color;
void main() {
    color = vnormal;
}

Fragment shader outputs are clamped to the 0.0 - 1.0 range when written to a normalized color buffer.
So the normals with negative components, like (-1, 0, 0), end up as RGB(0, 0, 0) in the color buffer: those faces are drawn, they are just black. Nothing is wrong with your winding order or your normals.
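If you want all six faces visible at once, remap the normal from [-1, 1] to [0, 1] (or take its absolute value) before writing it. A minimal sketch of the fragment shader:
#version 330 core
in vec3 vnormal;
out vec3 color;
void main() {
    // map each component from [-1, 1] to [0, 1] so negative normals stay visible
    color = vnormal * 0.5 + 0.5;
}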

Related

C++ OpenGL I can't see a cube on the screen

I can't see a cube on the screen. I positioned it with glTranslatef and got it to stop somewhere with LoadIdentity. I wanted to get a camera by changing the matrix modes, but the cube does not appear.
Code:
#include <Windows.h>
#include <gl/GL.h>
#include <gl/GLU.h>
#include <GLFW/glfw3.h>
#include <cstdio>
int width = 1280;
int height = 720;
GLfloat vertices[] = {
-1, -1, -1, -1, -1, 1, -1, 1, 1, -1, 1, -1,
1, -1, -1, 1, -1, 1, 1, 1, 1, 1, 1, -1,
-1, -1, -1, -1, -1, 1, 1, -1, 1, 1, -1, -1,
-1, 1, -1, -1, 1, 1, 1, 1, 1, 1, 1, -1,
-1, -1, -1, -1, 1, -1, 1, 1, -1, 1, -1, -1,
-1, -1, 1, -1, 1, 1, 1, 1, 1, 1, -1, 1
};
GLfloat colors[] =
{
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0
};
void keyboard(GLFWwindow* window, int key, int scancode, int action, int mods) {
    if (action == GLFW_PRESS)
        if (key == GLFW_KEY_ESCAPE)
            glfwSetWindowShouldClose(window, GL_TRUE);
}
void drawCube() {
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_COLOR_ARRAY);
    glVertexPointer(3, GL_FLOAT, 0, vertices);
    glColorPointer(3, GL_FLOAT, 0, colors);
    glDrawArrays(GL_QUADS, 0, 24);
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_COLOR_ARRAY);
}
int main(void)
{
    GLFWwindow* window;
    /* Initialize the library */
    if (!glfwInit())
        return -1;
    /* Create a windowed mode window and its OpenGL context */
    window = glfwCreateWindow(width, height, "C++ OpenGL Test Area", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        return -1;
    }
    /* Make the window's context current */
    glfwMakeContextCurrent(window);
    glfwSetKeyCallback(window, keyboard);
    glEnable(GL_DEPTH_TEST);
    /* Loop until the user closes the window */
    while (!glfwWindowShouldClose(window))
    {
        glViewport(0, 0, width, height);
        /* Render here */
        glClearColor(0.0f, 192.0f / 256.0f, 1.0f, 1.0f); // 192/256 is integer division and always yields 0
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glMatrixMode(GL_PROJECTION);
        glLoadIdentity();
        gluLookAt(1, 1, 1, 0, 0, 0, 0, 1, 0);
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();
        glTranslatef(0, 0, 5);
        drawCube();
        glFlush();
        /* Swap front and back buffers */
        glfwSwapBuffers(window);
        /* Poll for and process events */
        glfwPollEvents();
    }
    glfwTerminate();
    return 0;
}
I thought it was because I couldn't draw the cube, but I checked the code again and there is no problem drawing the cube.
The cube is not in the viewing volume. If you do not specify a projection matrix, the viewing volume is the cube from (-1, -1, -1) to (1, 1, 1), centered on the camera position and aligned along the line of sight. Any geometry that is not in the viewing volume is clipped. You must set up a projection matrix. The projection matrix defines the volume of the scene that is projected onto the viewport. An orthographic projection matrix can be set with glOrtho; a perspective projection matrix can be set with glFrustum or gluPerspective. The perspective projection matrix defines a viewing frustum. e.g.:
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
float aspect = (float)width / (float)height;
gluPerspective(90.0, aspect, 0.1, 10.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(4, 4, 4, 0, 0, 0, 0, 1, 0);
drawCube();
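For the orthographic route mentioned above, a minimal sketch (the bounds here are my own example values, chosen only so the 2x2x2 cube at the origin stays inside the volume when viewed with the same gluLookAt):
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
float aspect = (float)width / (float)height;
// example bounds; widen x by the aspect ratio so the view is not stretched
glOrtho(-2.0 * aspect, 2.0 * aspect, -2.0, 2.0, 0.1, 10.0);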

OpenGL: How to resolve the lighting problem of Cube?

// Vertices information
GLfloat vertices[] = { 1, 1, 1, -1, 1, 1, -1,-1, 1, 1,-1, 1, // (front)
1, 1, 1, 1,-1, 1, 1,-1,-1, 1, 1,-1, // (right)
1, 1, 1, 1, 1,-1, -1, 1,-1, -1, 1, 1, // (top)
-1, 1, 1, -1, 1,-1, -1,-1,-1, -1,-1, 1, // (left)
-1,-1,-1, 1,-1,-1, 1,-1, 1, -1,-1, 1, // (bottom)
1,-1,-1, -1,-1,-1, -1, 1,-1, 1, 1,-1 }; // (back)
// Normal information
GLfloat normals[72] = { 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, // (front)
1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, // (right)
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, // (top)
-1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, // (left)
0,-1, 0, 0,-1, 0, 0,-1, 0, 0,-1, 0, // (bottom)
0, 0,-1, 0, 0,-1, 0, 0,-1, 0, 0,-1 }; // (back)
// drawing cube
glEnableClientState(GL_NORMAL_ARRAY);
glEnableClientState(GL_VERTEX_ARRAY);
glNormalPointer(GL_FLOAT, 0, normals);
glVertexPointer(3, GL_FLOAT, 0, vertices);
glDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_BYTE, indices);
glPopMatrix();
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
I want to make sure there is no color difference between the two sides.
Is there a solution for this?
The issue is caused by the flat shading mode being active (see Detecting legacy OpenGL and glShadeModel).
The light is computed per vertex of the triangle primitive, but the color of the whole triangle is taken from the provoking vertex.
Switch to the smooth (GL_SMOOTH) shading model and ensure that the light is diffuse (GL_DIFFUSE parameter - see glLight):
glShadeModel(GL_SMOOTH);
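A minimal sketch of the diffuse light setup (assuming the common GL_LIGHT0; your light index and color may differ):
GLfloat diffuse[] = { 1.0f, 1.0f, 1.0f, 1.0f };
glLightfv(GL_LIGHT0, GL_DIFFUSE, diffuse); // the diffuse term varies per vertex, which GL_SMOOTH then interpolates
glEnable(GL_LIGHTING);
glEnable(GL_LIGHT0);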
See the difference between shading mode GL_FLAT (left) and GL_SMOOTH (right):
Another option would be to render GL_QUADS primitives instead of GL_TRIANGLES primitives, replacing
glDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_BYTE, indices);
with
glDrawArrays(GL_QUADS, 0, 24);
Again, GL_FLAT (left) and GL_SMOOTH (right):

Drawing a square using OpenGL ES glDrawElements is not working

I'm trying to draw a square on Android using the code below:
void drawSquare()
{
    glClear(GL_COLOR_BUFFER_BIT);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    GLfloat vertices[] =
    {
        -0.5f, -0.5f,
         0.5f, -0.5f,
         0.5f,  0.5f,
        -0.5f,  0.5f
    };
    GLubyte indices[] = { 0, 1, 2, 3 };
    glVertexPointer(2, GL_FLOAT, 0, vertices);
    glDrawElements(GL_TRIANGLES, 4, GL_UNSIGNED_BYTE, indices);
}
Before, I call the above method I set up the display like:
bool initDisplay()
{
    const EGLint attribs[] =
    {
        EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
        EGL_BLUE_SIZE, 8,
        EGL_GREEN_SIZE, 8,
        EGL_RED_SIZE, 8,
        EGL_NONE
    };
    EGLint format;
    EGLint numConfigs;
    EGLConfig config;
    EGLDisplay mDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    eglInitialize(mDisplay, 0, 0);
    eglChooseConfig(mDisplay, attribs, &config, 1, &numConfigs);
    eglGetConfigAttrib(mDisplay, config, EGL_NATIVE_VISUAL_ID, &format);
    ANativeWindow_setBuffersGeometry(mApp->window, 0, 0, format);
    EGLSurface mSurface = eglCreateWindowSurface(mDisplay, config, mApp->window, NULL);
    EGLContext mContext = eglCreateContext(mDisplay, config, NULL, NULL);
    eglMakeCurrent(mDisplay, mSurface, mSurface, mContext);
    eglQuerySurface(mDisplay, mSurface, EGL_WIDTH, &mWidth);
    eglQuerySurface(mDisplay, mSurface, EGL_HEIGHT, &mHeight);
    return true;
}
And setup OpenGL like:
bool initGL()
{
    glDisable(GL_DITHER);
    glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST);
    glClearColor(0.f, 0.f, 0.f, 1.0f);
    glShadeModel(GL_SMOOTH);
    glViewport(0, 0, mWidth, mHeight);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    return true;
}
However, I do not see the square on the screen, just a black screen. Thanks in advance for your help.
The indices
GLubyte indices[] = { 0, 1, 2, 3 };
don't specify triangle primitives; they specify a quad. Use the primitive type GL_TRIANGLE_FAN (OpenGL ES has no GL_QUADS):
GLubyte indices[] = { 0, 1, 2, 3 };
glVertexPointer(2, GL_FLOAT, 0, vertices);
glDrawElements(GL_TRIANGLE_FAN, 4, GL_UNSIGNED_BYTE, indices);
or form the square by 2 triangles with the indices (0 - 1 - 2) and (0 - 2 - 3):
3        2
+------+   +
|     /   /|
|    /   / |
|   /   /  |
+  +------+
0        1
GLubyte indices[] = { 0, 1, 2, 0, 2, 3 };
glVertexPointer(2, GL_FLOAT, 0, vertices);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, indices);

OpenGL screen is flickering when enabling DEPTH_TEST

I'm having a problem with my OpenGL application. When I don't have DEPTH_TEST enabled it draws fine (except for the wrong draw order), but if I enable it everything starts to flicker. OpenGL doesn't give any errors, and I didn't forget the glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);. I'm pretty clueless as to what causes my problem. Any help would be appreciated.
main.cpp
#include <iostream>
#include <SDL2\SDL.h>
#include <GL\glew.h>
#include <glm\glm.hpp>
#include <glm\gtc\matrix_transform.hpp>
#include <fstream>
#include <string>
#include "vertexdata.h"
#define WIDTH 800
#define HEIGHT 600
std::string filetobuf(const char*);
int main(int, char**) {
    // Init the window with an OpenGL context
    SDL_Window *window;
    SDL_Init(SDL_INIT_EVERYTHING);
    window = SDL_CreateWindow("SDL window", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, WIDTH, HEIGHT, SDL_WINDOW_OPENGL);
    SDL_GLContext context = SDL_GL_CreateContext(window);
    glewInit();
    // Create the shaders
    std::string vertexsourceString = filetobuf("tutorial2.vert");
    std::string fragmentsourceString = filetobuf("tutorial2.frag");
    const char* vertexsource = vertexsourceString.c_str();
    const char* fragmentsource = fragmentsourceString.c_str();
    GLuint vertexshader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexshader, 1, (const GLchar**)&vertexsource, 0);
    glCompileShader(vertexshader);
    GLuint fragmentshader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentshader, 1, (const GLchar**)&fragmentsource, 0);
    glCompileShader(fragmentshader);
    GLuint shaderprogram = glCreateProgram();
    glAttachShader(shaderprogram, vertexshader);
    glAttachShader(shaderprogram, fragmentshader);
    glBindAttribLocation(shaderprogram, 0, "in_Position");
    glBindAttribLocation(shaderprogram, 1, "in_Color");
    glLinkProgram(shaderprogram);
    glUseProgram(shaderprogram);
    // Setup rectangles
    GLuint vao;
    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);
    GLuint vbo[3];
    glGenBuffers(3, vbo);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo[0]);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, INDICES_SIZE, indices, GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, vbo[1]);
    glBufferData(GL_ARRAY_BUFFER, VERTEX_BUFFER_SIZE, vertices, GL_STATIC_DRAW);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, vbo[2]);
    glBufferData(GL_ARRAY_BUFFER, COLOR_BUFFER_SIZE, colors, GL_STATIC_DRAW);
    glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, 0);
    glEnableVertexAttribArray(1);
    GLint projectionMatrixLocation = glGetUniformLocation(shaderprogram, "projectionMatrix");
    glm::mat4 projectionMatrix = glm::perspective(70.0f, (float)WIDTH / (float)HEIGHT, 0.0f, 100.0f);
    glUniformMatrix4fv(projectionMatrixLocation, 1, GL_FALSE, &projectionMatrix[0][0]);
    GLint modelMatrixLocation = glGetUniformLocation(shaderprogram, "modelMatrix");
    glClearColor(0, 0, 1, 1);
    glEnable(GL_DEPTH_TEST);
    bool running = true;
    while (running) {
        SDL_Event e;
        while (SDL_PollEvent(&e)) {
            if (e.type == SDL_QUIT) {
                running = false;
            }
        }
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glm::mat4 modelMatrix(1);
        modelMatrix = glm::translate(modelMatrix, glm::vec3(0, 0, -5));
        modelMatrix = glm::rotate(modelMatrix, SDL_GetTicks() / 500.0f, glm::vec3(0, 1, 0));
        glUniformMatrix4fv(modelMatrixLocation, 1, GL_FALSE, &modelMatrix[0][0]);
        glDrawElements(GL_TRIANGLES, INDICES_SIZE, GL_UNSIGNED_BYTE, 0);
        SDL_GL_SwapWindow(window);
    }
    SDL_GL_DeleteContext(context);
    SDL_DestroyWindow(window);
    SDL_Quit();
    return 0;
}
std::string filetobuf(const char* path)
{
    if (path == nullptr) {
        std::cout << "No file specified!" << std::endl;
        return ""; // returning 0 would construct std::string from a null pointer, which is undefined behavior
    }
    std::ifstream file(path);
    if (!file.is_open()) {
        std::cout << "File not found! '" << path << "'" << std::endl;
        return "";
    }
    std::string source;
    std::string line;
    while (file.good()) {
        std::getline(file, line);
        source += line + "\n";
    }
    file.close();
    return source;
}
vertexdata.h
#pragma once
#include <GL\GL.h>
#define INDICES_SIZE 12
#define COLOR_VECTOR_SIZE 3
#define VERTEX_VECTOR_SIZE 3
#define VERTEX_COUNT 8
#define COLOR_BUFFER_SIZE COLOR_VECTOR_SIZE * VERTEX_COUNT * sizeof(GLfloat)
#define VERTEX_BUFFER_SIZE VERTEX_VECTOR_SIZE * VERTEX_COUNT * sizeof(GLfloat)
static GLubyte indices[] = {
0, 1, 2,
2, 3, 0,
4, 5, 6,
6, 7, 4,
};
static GLfloat colors[] = {
1, 0, 0,
1, 0, 0,
1, 0, 0,
1, 0, 0,
0, 1, 0,
0, 1, 0,
0, 1, 0,
0, 1, 0,
};
static GLfloat vertices[] = {
1, 1, 1,
1, -1, 1,
-1, -1, 1,
-1, 1, 1,
1, 1, -1,
1, -1, -1,
-1, -1, -1,
-1, 1, -1,
};
Note: I removed the error checking from the main.cpp file to decrease the file size.
You can't set the near plane to zero. With a near plane of 0, the projection collapses every vertex to the same depth after the perspective divide, so the depth test is decided by rounding error, which is exactly the flicker you see. Put something reasonable, like 0.1f, instead:
glm::mat4 projectionMatrix = glm::perspective(70.0f, (float)WIDTH / (float)HEIGHT, 0.1f, 100.0f);
(Recent GLM versions also expect the field of view in radians: glm::radians(70.0f).)

Drawing sprite on top of a Tile Map

So I have a 2D array which acts as a map for my tiles to be drawn.
int sMap[12][20] = {
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
{1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1},
{1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1},
{1, 0, 1, 0, 1, 2, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1},
{1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1},
{1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1},
{1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1},
{1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 2, 0, 0, 1, 0, 0, 0, 1},
{1, 1, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 1, 0, 1, 1, 2, 2, 2, 1},
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1},
{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1},
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
};
Once my tiles have been loaded in, I use this function to place the tiles:
for (int y = 0; y < 12; y++){
    for (int x = 0; x < 20; x++){
        if (sMap[y][x] == 1)
            glBindTexture( GL_TEXTURE_2D, brick1);
        else if (sMap[y][x] == 2)
            glBindTexture( GL_TEXTURE_2D, brick2);
        else
            glBindTexture( GL_TEXTURE_2D, wall );
        glBegin(GL_QUADS);
        glTexCoord2f(0.0f, 0.0f); glVertex3f(float(x + offsetx),     float(MAP_SIZEY - (y + offsety)),     0.0f);
        glTexCoord2f(1.0f, 0.0f); glVertex3f(float(x + 1 + offsetx), float(MAP_SIZEY - (y + offsety)),     0.0f);
        glTexCoord2f(1.0f, 1.0f); glVertex3f(float(x + 1 + offsetx), float(MAP_SIZEY - (y + 1 + offsety)), 0.0f);
        glTexCoord2f(0.0f, 1.0f); glVertex3f(float(x + offsetx),     float(MAP_SIZEY - (y + 1 + offsety)), 0.0f);
        glEnd();
    }
}
I think I may have confused myself with the coordinate system of the tiles, because when I draw a basic OpenGL square to act as a sprite, I just get a black screen upon running the program.
I'm unsure whether this means the scale of the sprite relative to the tiles is wrong, or whether the sprite and tiles are on different Z planes.
I would appreciate it if someone could explain the coordinate system, in case I don't understand it as well as I thought, and also advise me how to draw an OpenGL square at the same coordinates.
Currently I have this to draw my basic sprite:
struct RECT { float x, y, w, h; };
RECT sprite = { 0, 0, 10, 10 };
void drawSprite(RECT rect){
    glBegin(GL_QUADS);
    glColor3f(1.0f, 0.0f, 0.0f);
    glVertex3f(rect.x, rect.y, 0.0);
    glVertex3f(rect.x, rect.y + rect.h, 0.0);
    glVertex3f(rect.x + rect.w, rect.y + rect.h, 0.0);
    glVertex3f(rect.x + rect.w, rect.y, 0.0);
    glEnd();
}
EDIT:
resize screen:
glViewport(0,0,width,height);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(45.0f,(GLfloat)width/(GLfloat)height,0.1f,20.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
draw scene:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
gluLookAt(10.0f, 8.0f, 20.0f, 10.0f, 8.0f, 0.0f, 0.0f, 1.0f, 0.0f);
glTranslatef(5.0f,4.0f,0.0f);
draw_tiles();
draw_sprite();
In the draw_tiles function it looks like you might be passing incorrect coordinates; if your tiles are meant to be more than one unit across, multiply the x and y values by your tile size.
Also try turning off depth testing and backface culling to help track down the black screen, as shown below:
glDisable( GL_DEPTH_TEST );
glDisable( GL_CULL_FACE );
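For the sprite scale, a minimal sketch under one assumption on my part: the tile quads above are one unit each, so a sprite in the same units, placed near your gluLookAt target, should land on screen. The 10 and 8 are just that target's x and y:
// a 1x1 sprite in the same 1-unit-per-tile space the map uses,
// positioned near the camera target so it is visible
RECT sprite = { 10.0f, 8.0f, 1.0f, 1.0f };
drawSprite(sprite);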