Just trying to get a triangle to draw to the screen, following a c++ tutorial. Tried to run the program and I get a NullPointerException on all Opengl calls. Also, I'm following a tutorial for opengl 3 although most of my calls are for earlier versions, is this just how lwjgl is set up, with the functions residing in the version where they originated from?
package examples;
import org.lwjgl.LWJGLException;
import org.lwjgl.opengl.*;
import java.nio.*;
public class Triangle
{
// An array of 3 vectors which represents 3 vertices
static final float vertexData[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
};
// This will identify our vertex buffer
int vertexBufferID;
public static void main(String[] args)
{
new Triangle();
}
public Triangle()
{
// Allocate floatBuffer to hold vertex data
FloatBuffer vertexBuffer = FloatBuffer.allocate(9);
// Put float data into buffer and position ready to read
vertexBuffer.put(vertexData).position(0);
// Generate 1 buffer, put the resulting identifier in vertexbuffer
IntBuffer buffers = IntBuffer.allocate(1); // allocate
GL15.glGenBuffers(buffers);
vertexBufferID = buffers.get(0);
// Binds a buffer to the ARRAY_BUFFER(target) (1 at a time) (breaks other bonds)
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vertexBufferID);
// Give our vertices to OpenGL. (creates store for data bound to target(above))
GL15.glBufferData(GL15.GL_ARRAY_BUFFER, vertexBuffer,GL15.GL_STATIC_DRAW);
try {
Display.setDisplayMode(new DisplayMode(800,600));
Display.create();
} catch (LWJGLException e) {
e.printStackTrace();
System.exit(0);
}
while(!Display.isCloseRequested())
{
// Render
// 1st attribute buffer : vertices
GL20.glEnableVertexAttribArray(0); // enable vertex attribute index: 0
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vertexBufferID);
// Specify location of vertex data for index 0
GL33.glVertexAttribP1ui(0, GL11.GL_FLOAT, false, 0);
// Draw the triangle!
GL11.glDrawArrays(GL11.GL_TRIANGLES, 0, 3); // Starting from vertex 0; 3 vertices total -> 1 triangle
GL20.glDisableVertexAttribArray(0);
}
}
}
I think that the problem is you have to create the Display before any openGl call.
Try to move
try {
Display.setDisplayMode(new DisplayMode(800,600));
Display.create();
} catch (LWJGLException e) {
e.printStackTrace();
System.exit(0);
}
at the top of the Triangle constructor.
Related
I'm trying to make a system that allows you to type in a position and scale and it will create a vector that automatically generates all the vertices. The problem is when I try to draw my object it just won't show up. I have used OpenGL's built-in debugging system but it didn't say anything was wrong. So then I tried to manually debug myself but everything seemed to draw just fine.
Renderer::createQuad() method:
Shape Renderer::createQuad(glm::vec2 position, glm::vec2 scale, Shader shader, Texture texture)
{
float x = position.x;
float y = position.y;
float width = scale.x;
float height = scale.y;
std::vector<float> vertices =
{
x+width, y+height, 1.0f, 1.0f, // TR
x+width, y-height, 1.0f, 0.0f, // BR
x-width, y-height, 0.0f, 0.0f, // BL
x-width, y+height, 0.0f, 1.0f // TL
};
std::vector<uint32_t> indices =
{
0, 1, 3,
1, 2, 3
};
m_lenVertices = vertices.size();
m_lenIndices = indices.size();
// these Create methods should be fine as OpenGL does not give me any error
// also I have another function that requires you to pass in the vertex data and indices that works just fine
// I bind the thing I am creating
createVAO();
createVBO(vertices);
createEBO(indices);
createTexture(texture);
createShader(shader.getVertexShader(), shader.getFragmentShader());
Shape shape;
glm::mat4 model(1.0f);
glUniformMatrix4fv(glGetUniformLocation(m_shader, "model"), 1, GL_FALSE, glm::value_ptr(model));
shape.setShader(m_shader);
shape.setVAO(m_VAO);
shape.setTexture(m_texture);
shape.setPosition(position);
return shape;
}
Renderer::draw() method:
void Renderer::draw(Shape shape)
{
if (!m_usingIndices)
{
// Unbinds any other shapes
glBindVertexArray(0);
glUseProgram(0);
shape.bindShader();
shape.bindVAO();
shape.bindTexture();
glDrawArrays(GL_TRIANGLES, 0, m_lenVertices);
}
else
{
// Unbinds any other shapes
glBindVertexArray(0);
glUseProgram(0);
shape.bindShader();
shape.bindVAO();
shape.bindTexture();
glDrawElements(GL_TRIANGLES, m_lenIndices, GL_UNSIGNED_INT, 0);
}
}
Projection matrix:
glm::mat4 m_projectionMat = glm::ortho(-Window::getWidth(), Window::getWidth(), -Window::getHeight(), Window::getHeight, 0.1f, 100.0f);
Creating then rendering the Quad:
// Creates the VBO, VAO, EBO, etc.
quad = renderer.createQuad(glm::vec2(500.0f, 500.0f), glm::vec2(200.0F, 200.0f), LoadFile::loadShader("Res/Shader/VertShader.glsl", "Res/Shader/FragShader.glsl"), LoadFile::loadTexture("Res/Textures/Lake.jpg"));
// In the main game loop we render the quad
quad.setCamera(camera); // Sets the View and Projection matrix for the quad
renderer.draw(quad);
Output:
Output of the code before
My code does not work. I think there is a way to render something on the screen without shaders but how? I heard something about modern OpenGL rendering and how OpenGL needs shaders to render. Help me.
This is my code:
import static org.lwjgl.glfw.GLFW.glfwDestroyWindow;
import static org.lwjgl.glfw.GLFW.glfwTerminate;
import java.nio.FloatBuffer;
import org.lwjgl.BufferUtils;
import org.lwjgl.glfw.GLFW;
import org.lwjgl.glfw.GLFWVidMode;
import org.lwjgl.opengl.GL;
import org.lwjgl.opengl.GL30;
import org.lwjgl.system.MemoryUtil;
public class Main {
public static long window;
public static boolean running = true;
public static void createWindow() {
GLFW.glfwInit();
GLFW.glfwDefaultWindowHints();
GLFW.glfwWindowHint(GLFW.GLFW_VISIBLE, GL30.GL_FALSE);
GLFW.glfwWindowHint(GLFW.GLFW_VISIBLE, GL30.GL_TRUE);
window = GLFW.glfwCreateWindow(600, 600, "RenderQuad", 0, 0);
GLFW.glfwMakeContextCurrent(window);
GLFWVidMode vidmode = GLFW.glfwGetVideoMode(GLFW.glfwGetPrimaryMonitor());
GLFW.glfwSetWindowPos(window, (vidmode.width() - 600) / 2, (vidmode.height() - 600) / 2);
GLFW.glfwShowWindow(window);
GL.createCapabilities();
GL30.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
}
public static int vaoID;
public static int vboID;
public static void render() {
float[] vertices = {
-0.5f, 0.5f, 0f,
-0.5f, -0.5f, 0f,
0.5f, -0.5f, 0f,
0.5f, -0.5f, 0f,
0.5f, 0.5f, 0f,
-0.5f, 0.5f, 0f
};
vaoID = GL30.glGenVertexArrays();
GL30.glBindVertexArray(vaoID);
FloatBuffer buffer = MemoryUtil.memAllocFloat(vertices.length);
buffer.put(vertices);
buffer.flip();
vboID = GL30.glGenBuffers();
GL30.glBindBuffer(GL30.GL_ARRAY_BUFFER, vboID);
GL30.glBufferData(GL30.GL_ARRAY_BUFFER, buffer, GL30.GL_STATIC_DRAW);
MemoryUtil.memFree(buffer);
GL30.glVertexAttribPointer(0, 3, GL30.GL_FLOAT, false, 0, 0);
GL30.glBindBuffer(GL30.GL_ARRAY_BUFFER, 0);
GL30.glBindVertexArray(0);
}
public static void loopCycle() {
GL30.glClear(GL30.GL_COLOR_BUFFER_BIT);
GL30.glEnableVertexAttribArray(0);
GL30.glBindVertexArray(vaoID);
GL30.glDrawArrays(GL30.GL_TRIANGLES, 0, 6);
GL30.glDisableVertexAttribArray(0);
GL30.glBindVertexArray(0);
}
public static void clean() {
GL30.glDisableVertexAttribArray(0);
GL30.glBindBuffer(GL30.GL_ARRAY_BUFFER, 0);
GL30.glBindVertexArray(0);
GL30.glDeleteBuffers(vboID);
GL30.glDeleteVertexArrays(vaoID);
glfwDestroyWindow(window);
glfwTerminate();
}
public static void loop() {
GLFW.glfwSwapBuffers(window);
GLFW.glfwPollEvents();
//GL30.glClear(GL30.GL_COLOR_BUFFER_BIT | GL30.GL_DEPTH_BUFFER_BIT);
}
public static void main(String[] args) {
createWindow();
render();
while(running) {
if(GLFW.glfwWindowShouldClose(window)) {running = false; break;}
loopCycle();
loop();
}
clean();
}
}
You do not have the array for vertex attribute 0 enabled when you draw. The array enable state for each vertex attribute is part of the VAO, and when you call glEnableVertexAttribArray it will affect the currently bound VAO.
I have no idea where this comes from, but a lot of people (and seemingly also tutorials) use a scheme like:
Setup() {
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
// ... [set up some VBOs and maybe EBO]
glVertexAttribPointer(...);
glBindVertexArray(vao);
}
Draw()
{
glBindVertexArray(vao);
glEnableVertexAttribArray(...);
glDraw...(...);
glDisableVertexAttribArray(...);
glBindVertexArray(0);
}
Now this scheme in principle works, you just mistakenly enable the array while VAO 0 is still bound, and then switch to vao for which no array is enabled at all.
But this scheme is utterly inefficient, the VAO does store this information for a reason: so that you not have to re-specify it each time you want to use it. As a result, the glEnableVertexAttribArray() belongs into the Setup function, and should only be called again if the actual set of vertex attributes for that particular VAO changes (which is never in those examples).
I heard something about modern OpenGL rendering and how OpenGL needs shaders to render.
Yes. Note that shaders were introduced to OpenGL with Version 2.0 in 2004, so that's quite a a strecht of the term modern when it comes to GPU development. You really should consider switching to a 3.2 core profile context, where all the legacy deprecated stuff leftover from the 90s is removed.
I would like to create a renderer class that can switch between 2 or more shaders without adding more and more draw calls.
What I mean is to have 2 shaders - A and B - and method that takes shader, position, size to create for example a quad.
and I want to add that data (position, size) and pass it to vertex A (so it is 1st draw calls), then add another data to vertex B(so it is 2nd draw call) and again add data to shader A (so it still should be 2 draw calls, because we have already used shader A somewhere before). And at the end go though draw calls and draw a scene.
I have a RenderData class that adds draw calls, vertex, element data, etc.
struct DrawCall
{
//it may have more data like texture, clip rect, camera, etc.
Shader* shader = nullptr;
};
struct Vertex
{
Vector2 position;
}
class RenderData
{
public:
RenderData();
~RenderData();
void Free() {
vertexBuffer.clear();
shader.clear();
drawCall.clear();
elementBuffer.clear();
}
void Draw(const Rect& dest);
void AddDrawCall();
inline DrawCall* getDrawCall() { return drawCall.size() > 0 ? &drawCall.back() : nullptr; }
void UpdateShader();
void PushShader(Shader* shader);
void PopShader();
inline Shader* getShader() { return shader.size() > 0 ? shader.back() : nullptr; }
uint currentVertexIndex = 0;
vector<Vertex> vertexBuffer; // Vertex data
vector<Shader*> shader;
vector<DrawCall> drawCall;
vector<uint> elementBuffer; // Index data
}
void RenderData::AddDrawCall()
{
DrawCall dc;
dc.shader = getShader();
drawCall.push_back(dc);
}
void RenderData::UpdateShader()
{
Shader* currentShader = getShader();
DrawCall* currentDraw = getDrawCall();
if (!currentDraw || currentDraw->shader != currentShader) {
AddDrawCall();
return;
}
DrawCall* prevDraw = drawCall.size() > 1 ? currentDraw - 1 : nullptr;
if (prevDraw->shader == currentShader) {
drawCall.pop_back();
} else { currentDraw->shader = currentShader; }
}
void RenderData::PushShader(Shader* shader)
{
this->shader.push_back(shader);
UpdateShader();
}
void RenderData::PopShader()
{
Custom_Assert(shader.size() > 0, "Cannot PopShader() with size < 0!\n");
shader.pop_back();
UpdateShader();
}
void RenderData::Draw(const Rect& dest)
{
//dest -> x, y, w and h
//setup vertices
vertexBuffer.push_back(...);
vertexBuffer.push_back(...);
vertexBuffer.push_back(...);
vertexBuffer.push_back(...);
//setup elements
elementBuffer.push_back(...);
elementBuffer.push_back(...);
elementBuffer.push_back(...);
elementBuffer.push_back(...);
elementBuffer.push_back(...);
elementBuffer.push_back(...);
}
and Renderer2D class which has few objects:
vao, vbo, ebo, RenderData
and few methods:
Create() -> it creates ebo and ebo
RenderClear() -> it Free() RenderData, sets up viewport
RenderPresent -> it creates and binds vao, binds vbo, adds vbo attributes and data, binds ebo and adds ebo data, and goes through DrawCall& drawCall : renderData.drawCall, uses shader program and draws elements;
void Renderer2D::Create()
{
//gens and binds
vbo = vbo->Create(TYPE::ARRAY, USAGE::DYNAMIC_DRAW));
//gens and binds
ebo = ebo->Create(TYPE::ELEMENT, USAGE::DYNAMIC_DRAW));
}
void Renderer2D::RenderClear()
{
setRenderViewport(0, 0, 1280, 720);
renderData.Free();
}
void Renderer2D::RenderPresent()
{
vao = vao->Create();
vbo->BindBuffer();
vbo->AddAttribute(0, 2, GL_FLOAT, false, sizeof(Vertex), (const void*)offsetof(Vertex, position));
vbo->AddData(renderData.vertexBuffer.size() * sizeof(Vertex), renderData.vertexBuffer.data());
ebo->BindBuffer();
ebo->AddData(renderData.elementBuffer.size() * sizeof(uint), renderData.elementBuffer.data());
for (auto& drawCall : renderData.drawCall) {
drawCall.shader->UseProgram();
vao->DrawElements(drawCall.elemCount, GL_UNSIGNED_INT, nullptr);
}
//delete vertex array
vao->Free();
}
how it works:
int main()
{
Renderer2D renderer2D;
renderer2D.Create();
Shader A("shader.vtx", "shader.frag");
Shader B("shader.vtx", "shader2.frag");
while(!quit) {
renderer2D.RenderClear();
//Push A shader = add 1st draw call
renderer2D->PushShader(&A);
renderer2D->Draw({ 100.0f, 100.0f, 50.0f, 50.0f });
renderer2D->PopShader();
//Push B shader = add 2nd draw call
renderer2D->PushShader(&B);
renderer2D->Draw({ 200.0f, 200.0f, 50.0f, 50.0f });
renderer2D->PopShader();
//Push A shader = do not add 3rd draw call, use already existing one
//This version adds 3rd draw call instead of using existing one
renderer2D->PushShader(&A);
renderer2D->Draw({ 400.0f, 400.0f, 50.0f, 50.0f });
renderer2D->PopShader();
renderer2D.RenderPresent();
}
return 0;
}
I would like to change it, somehow, to work as I described, but I do not know how (if it is even possible) to do it.
I just ran my first java program running JOGL. I have worked on c++ using the Glut.h library. I have a question as I am trying to understand the coordinate system on the graphic window displayed using JOGL.
Now, in c++, the window coordinate system begins at the top left corner of the window (that is where the origin is). However, when I run the below JOGL program, the origin is not at the top left corner. The below program will draw 3 lines, all of which start at the origin:
import com.sun.opengl.util.Animator;
import java.awt.Frame;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import javax.media.opengl.GL;
import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.GLCanvas;
import javax.media.opengl.GLEventListener;
import javax.media.opengl.glu.GLU;
public class Lab2 implements GLEventListener {
public static void main(String[] args) {
Frame frame = new Frame("My First JOGL Project");
GLCanvas canvas = new GLCanvas();
canvas.addGLEventListener(new Lab2());
frame.add(canvas);
frame.setSize(640, 480);
final Animator animator = new Animator(canvas);
frame.addWindowListener(new WindowAdapter() {
#Override
public void windowClosing(WindowEvent e) {
// Run this on another thread than the AWT event queue to
// make sure the call to Animator.stop() completes before
// exiting
new Thread(new Runnable() {
public void run() {
animator.stop();
System.exit(0);
}
}).start();
}
});
// Center frame
frame.setLocationRelativeTo(null);
frame.setVisible(true);
animator.start();
}
public void init(GLAutoDrawable drawable) {
// Use debug pipeline
// drawable.setGL(new DebugGL(drawable.getGL()));
GL gl = drawable.getGL();
System.err.println("INIT GL IS: " + gl.getClass().getName());
// Enable VSync
gl.setSwapInterval(1);
// Setup the drawing area and shading mode
gl.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
gl.glShadeModel(GL.GL_SMOOTH); // try setting this to GL_FLAT and see what happens.
}
public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) {
GL gl = drawable.getGL();
GLU glu = new GLU();
if (height <= 0) { // avoid a divide by zero error!
height = 1;
}
final float h = (float) width / (float) height;
gl.glViewport(0, 0, width, height);
gl.glMatrixMode(GL.GL_PROJECTION);
gl.glLoadIdentity();
//glu.gluPerspective(45.0f, h, 1.0, 20.0);
glu.gluPerspective(45.0f, h, 2.0, 20.0);
gl.glMatrixMode(GL.GL_MODELVIEW);
gl.glLoadIdentity();
}
public void display(GLAutoDrawable drawable) {
GL gl = drawable.getGL();
// Clear the drawing area
gl.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT);
// Reset the current matrix to the "identity"
gl.glLoadIdentity();
// Move the "drawing cursor" around
gl.glTranslatef(-1.5f, 0.0f, -6.0f);
// Drawing Lines
gl.glBegin(GL.GL_LINES);
gl.glColor3f(0.0f, 1.0f, 0.0f); // Set the current drawing color to green
gl.glVertex3f(0.0f, 0.0f, 0.0f); //Set first vertex of first line
gl.glVertex3f(1.0f, 0.0f, 0.0f); //Set second vertex of first line
gl.glColor3f(0.0f, 0.0f, 1.0f); // Set the current drawing color to blue
gl.glVertex3f(0.0f, 0.0f, 0.0f); //Set first vertex of second line
gl.glVertex3f(0.0f, 1.0f, 0.0f); //Set second vertex of second line
gl.glColor3f(1.0f, 0.0f, 0.0f); // Set the current drawing color to red
gl.glVertex3f(0.0f, 0.0f, 0.0f); //Set first vertex of third line
gl.glVertex3f(0.0f, 0.0f, 1.0f); //Set second vertex of third line
gl.glEnd();
// Flush all drawing operations to the graphics card
gl.glFlush();
}
public void displayChanged(GLAutoDrawable drawable, boolean modeChanged, boolean
deviceChanged) {}
}
Can someone epxlain?
I can't find a way to use correctly JOGL with Textures in my 2D game, here's my code:
Init method
public void init(GLAutoDrawable drawable) {
[...]
gl.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
gl.glShadeModel(GL.GL_SMOOTH);
gl.glEnable(GL.GL_TEXTURE_2D);
gl.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA);
gl.glEnable(GL.GL_BLEND);
box.initTexture(gl, "image/tex1.png");
}
Display method
public void display(GLAutoDrawable drawable) {
GL gl = drawable.getGL();
gl.glClear(GL.GL_COLOR_BUFFER_BIT);
gl.glLoadIdentity();
box.render(gl);
gl.glFlush();
}
Myclass.init
public boolean initTexture(GL gl,String textureName){
boolean ris=true;
try
{
Texture = TextureIO.newTexture(new File(textureName),false);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR);
}
catch (IOException ex)
{
System.err.println(ex);
ris=false;
}
return ris;
}
Myclass.render
public void render(GL gl){
gl.glTranslatef(X,Y,0.0f);
gl.glRotatef(Angle, 0.0f, 0.0f, 1.0f);
Texture.enable();
Texture.bind();
gl.glBegin(GL.GL_QUADS);
gl.glColor3i(1,1,1);
gl.glTexCoord2f(0, 0);
gl.glVertex2f(-Width/2,-Height/2);
gl.glTexCoord2i(0, 1);
gl.glVertex2f(-Width/2,Height/2);
gl.glTexCoord2i(1, 1);
gl.glVertex2f(Width/2,Height/2);
gl.glTexCoord2i(1, 0);
gl.glVertex2f(Width/2,-Height/2);
gl.glEnd();
gl.glLoadIdentity();
}
This code shows an empty screen and it doesn't give any sort of errors. I've tested this code without texture and it works just fine.
PS: the texture i'm using is in myprojectfolder\image\ and it is a 128 x 128 pixel png image
gl.glColor3i(1,1,1);
^^^^^^^^ wat
glColor():
...
Signed integer color components, when specified, are linearly mapped to floating-point values such that the most positive representable value maps to 1.0, and the most negative representable value maps to -1.0. (Note that this mapping does not convert 0 precisely to 0.0.)
...
So, you're setting the current color to something almost black (RGB(0,0,0)).
Combined with the default texenv of GL_MODULATE all your incoming texels will be multiplied by near-zero, giving near-zero.
Either switch to glColor3f(1,1,1) or GL_DECAL.