Why is glDrawElements() not drawing anything? - java

I've seen a couple of questions about the same topic but still couldn't figure out what is wrong with my code.
I already got a window running in which a keypress changes the background color with glClearColor().
But now, when I try to draw a quad onto the screen, the screen stays black.
A little summary of how I think it should work follows.
This is how the mesh is created:
public Mesh(Vertex[] verts, int[] indices) {
    this.verts = verts;
    this.indices = indices;
}

// Different class
public Mesh mesh = new Mesh(new Vertex[] {
    new Vertex(new Vector3f(-0.5f,  0.5f, 0.0f)),
    new Vertex(new Vector3f(-0.5f, -0.5f, 0.0f)),
    new Vertex(new Vector3f( 0.5f, -0.5f, 0.0f)),
    new Vertex(new Vector3f( 0.5f,  0.5f, 0.0f))
}, new int[] {
    0, 1, 2,
    0, 3, 2
});
For initializing the game, this is called:
public void create() {
    vao = GL30.glGenVertexArrays();
    GL30.glBindVertexArray(vao);

    FloatBuffer positionBuffer = MemoryUtil.memAllocFloat(verts.length * 3);
    float[] positionData = new float[verts.length * 3];
    for (int i = 0; i < verts.length; i++) {
        positionData[i * 3]     = verts[i].getPosition().getX();
        positionData[i * 3 + 1] = verts[i].getPosition().getY();
        positionData[i * 3 + 2] = verts[i].getPosition().getZ();
    }
    positionBuffer.put(positionData).flip();

    pbo = GL15.glGenBuffers();
    GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, pbo);
    GL15.glBufferData(GL15.GL_ARRAY_BUFFER, positionBuffer, GL15.GL_STATIC_DRAW);
    GL20.glVertexAttribPointer(0, 3, GL11.GL_FLOAT, false, 0, 0);
    GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);

    IntBuffer indexBuffer = MemoryUtil.memAllocInt(indices.length * 3);
    indexBuffer.put(indices).flip();

    ibo = GL15.glGenBuffers();
    GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, ibo);
    GL15.glBufferData(GL15.GL_ELEMENT_ARRAY_BUFFER, indexBuffer, GL15.GL_STATIC_DRAW);
    GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, 0);
}
And lastly, the mesh should be rendered with this:
public void renderMesh(Mesh mesh) {
    clear();
    GL30.glBindVertexArray(mesh.getVAO());
    GL30.glEnableVertexAttribArray(0);
    GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, mesh.getIBO());
    GL11.glDrawElements(GL11.GL_TRIANGLES, mesh.getIndices().length, GL11.GL_FLOAT, 0);
    GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, 0);
    GL30.glDisableVertexAttribArray(0);
    GL30.glBindVertexArray(0);
}
I've checked that the methods are actually invoked by using System.out.println.
The game's input still works. It's just that the screen is black and not showing any quad.
Why is it not drawing anything onto the screen?

The type argument passed to glDrawElements has to correspond to the type of the indices in the index buffer, rather than to the vertex buffer.
In your case it has to be GL_UNSIGNED_INT rather than GL_FLOAT. See glDrawElements.
Change
    GL11.glDrawElements(GL11.GL_TRIANGLES, mesh.getIndices().length, GL11.GL_FLOAT, 0);
to
    GL11.glDrawElements(GL11.GL_TRIANGLES, mesh.getIndices().length, GL11.GL_UNSIGNED_INT, 0);
Note, GL_FLOAT is not an accepted value for glDrawElements and will cause an INVALID_ENUM error.
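For reference, a minimal sketch of the corrected renderMesh, reusing the field and getter names from the question. The glGetError() check is an addition for illustration only; it shows how the GL_FLOAT mistake would have surfaced as GL_INVALID_ENUM (1280):

public void renderMesh(Mesh mesh) {
    clear();
    GL30.glBindVertexArray(mesh.getVAO());
    GL30.glEnableVertexAttribArray(0);
    GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, mesh.getIBO());
    // The type must match the IntBuffer the indices were uploaded from.
    GL11.glDrawElements(GL11.GL_TRIANGLES, mesh.getIndices().length, GL11.GL_UNSIGNED_INT, 0);
    // Diagnostic only: with GL_FLOAT this would have reported GL_INVALID_ENUM.
    int err = GL11.glGetError();
    if (err != GL11.GL_NO_ERROR) {
        System.err.println("OpenGL error: " + err);
    }
    GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, 0);
    GL30.glDisableVertexAttribArray(0);
    GL30.glBindVertexArray(0);
}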

Related

Draw triangle VBO with and without shader

I am trying to draw a simple, red triangle on screen. Without using a VBO the code works as intended. When trying to draw it using a VBO (with or without a shader), it simply has no effect.
My code:
// Works
glBegin(GL_TRIANGLES);
glVertex3f(0f, 0f, 0.0f);
glVertex3f(0.5f, 0f, 0.0f);
glVertex3f(0.5f, 0.5f, 0.0f);
glEnd();

// Does not work
int vertexArrayID = glGenVertexArrays();
glBindVertexArray(vertexArrayID);
float[] g_vertex_buffer_data = new float[]{
    0f, 0f, 0.0f,
    0.5f, 0f, 0.0f,
    0.5f, 0.5f, 0.0f,
};
int vertexbuffer = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glBufferData(GL_ARRAY_BUFFER, FloatBuffer.wrap(g_vertex_buffer_data), GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glVertexAttribPointer(
    0,        // attribute 0. No particular reason for 0, but must match the layout in the shader.
    3,        // size
    GL_FLOAT, // type
    false,    // normalized?
    0,        // stride
    0         // array buffer offset
);
glDrawArrays(GL_TRIANGLES, 0, 3); // 3 indices starting at 0 -> 1 triangle
glDisableVertexAttribArray(0);
glDeleteBuffers(vertexbuffer);
glDeleteVertexArrays(vertexArrayID);
System.out.println(glGetError());
glGetError() always returns 0.
I use a default, tutorial-copied shader to test my code (I link and bind the program before using the above code):
#version 330 core
// Input vertex data, different for all executions of this shader.
layout(location = 0) in vec3 vertexPosition_modelspace;
void main(){
    gl_Position.xyz = vertexPosition_modelspace;
    gl_Position.w = 1.0;
}

#version 330 core
// Output data
out vec3 color;
void main()
{
    // Output color = red
    color = vec3(1,0,0);
}

VBO and VAO not drawing opengl and LWJGL3

I've been playing around with OpenGL for a while now and I got to the point where I can draw 3D shapes. My vertices and indices are definitely right, but my shape was getting messed up, so I am now redoing my drawing. I used to use only a VBO with no VAO and just bind and draw it. This worked, but I'm suspicious of this being my bug. So I started using VAOs, and I don't see anything wrong with my code, but I still can't get it to draw my white square (no shaders, just like the wiki tutorials).
My code for initializing the window is here:
private void initWindow() {
    // Makes sure window can work
    if (!glfwInit()) {
        throw new IllegalStateException("Failed to Initialize GLFW!");
    }
    // Create window object and set its hints
    glfwWindowHint(GLFW_VISIBLE, GLFW_FALSE);
    this.windowRef = glfwCreateWindow(width, height, name, NULL, NULL);
    if (windowRef == 0) {
        throw new IllegalStateException("Failed to create Window!");
    }
    GLFWVidMode videoMode = glfwGetVideoMode(glfwGetPrimaryMonitor());
    glfwSetWindowPos(windowRef, (videoMode.width() - width) / 2, (videoMode.height() - height) / 2);
    // Make the OpenGL context current
    glfwMakeContextCurrent(windowRef);
    // Enable v-sync
    glfwSwapInterval(1);
    // Make GL capabilities for window
    GL.createCapabilities();
    glfwShowWindow(windowRef);
}
My code for initializing my buffers and objects is here:
public void loadGL() {
    float[] vertex = {
        0f, 0f, 0f,       //0
        0.5f, 0, 0,       //1
        0.5f, 0, 0.5f,    //2
        0f, 0f, 0.5f,     //3
        0f, 0.5f, 0f,     //4
        0.5f, 0.5f, 0,    //5
        0.5f, 0.5f, 0.5f, //6
        0f, 0.5f, 0.5f    //7
    };
    int[] index = {
        0, 1, 2, //0
        0, 2, 3, //1
        0, 3, 4, //2
        3, 7, 4, //3
        0, 4, 1, //4
        1, 5, 4, //5
        1, 5, 2, //6
        2, 6, 5, //7
        2, 3, 6, //8
        3, 7, 6, //9
        4, 5, 7, //10
        5, 6, 7  //11
    };
    size = 12 * 3;
    indicesBuff = BufferUtils.createIntBuffer(index.length);
    vertBuff = BufferUtils.createFloatBuffer(vertex.length);
    indicesBuff.put(index);
    vertBuff.put(vertex);
    indicesBuff.flip();
    vertBuff.flip();

    vao = glGenVertexArrays();
    glBindVertexArray(vao);

    vboID = glGenBuffers();
    glBindBuffer(GL_ARRAY_BUFFER, vboID);
    glBufferData(GL_ARRAY_BUFFER, vertBuff, GL_STATIC_DRAW);
    GL20.glVertexAttribPointer(0, 3, GL_FLOAT, false, 0, 0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);

    ibo = glGenBuffers();
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indicesBuff, GL_STATIC_DRAW);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

    glBindVertexArray(0);
}
And finally my main loop is here; it is called after the window init:
private void mainLoop() {
    loadGL();
    glClearColor(0.5f, 0.5f, 0.5f, 1);
    while (!glfwWindowShouldClose(windowRef)) {
        // Render stuff here
        // TODO: later skip this block if nothing has changed
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // clear the framebuffer
        glBindVertexArray(vao);
        glEnableVertexAttribArray(0);
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
        glDrawElements(GL_TRIANGLES, size, GL_UNSIGNED_INT, 0);
        // glBegin(GL_QUADS);
        // glVertex3f(-0.5f, -0.5f, 0);
        // glVertex3f(-0.5f, 0.5f, 0);
        // glVertex3f(0.5f, -0.5f, 0);
        // glVertex3f(0.5f, 0.5f, 0);
        // glEnd();
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
        glDisableVertexAttribArray(0);
        glBindVertexArray(0);
        glfwSwapBuffers(windowRef);
        glfwPollEvents();
    }
}
By the way, drawing it with glBegin and so on works, but it's not efficient for what I want to do.
The modern way of rendering in OpenGL would be to use a shader program.
If you don't use a shader program, then you have to define the array of vertex data the deprecated way by glVertexPointer, and you have to enable the client-side capability for vertex coordinates by glEnableClientState(GL_VERTEX_ARRAY):
vao = glGenVertexArrays();
glBindVertexArray(vao);
vboID = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vboID);
glBufferData(GL_ARRAY_BUFFER, vertBuff, GL_STATIC_DRAW);
glVertexPointer(3, GL_FLOAT, 0, 0);   // <-------------- (note: glVertexPointer lives in GL11, not GL20)
glBindBuffer(GL_ARRAY_BUFFER, 0);

glBindVertexArray(vao);
glEnableClientState(GL_VERTEX_ARRAY); // <--------------
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glDrawElements(GL_TRIANGLES, size, GL_UNSIGNED_INT, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glDisableClientState(GL_VERTEX_ARRAY); // <---------------
glBindVertexArray(0);
Further note that the state of the client-side capability (or vertex attribute array) and the reference to the index (element) buffer are stored in the Vertex Array Object's state vector.
So it is sufficient to enable the vertex coordinates when the vertex array object is specified, and to bind the index buffer while the vertex array object is bound. The enabling and disabling of the vertex coordinates and the binding of the index buffer when drawing the geometry can then be omitted:
vao = glGenVertexArrays();
glBindVertexArray(vao);
vboID = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vboID);
glBufferData(GL_ARRAY_BUFFER, vertBuff, GL_STATIC_DRAW);
glVertexPointer(3, GL_FLOAT, 0, 0);   // <--------------
glEnableClientState(GL_VERTEX_ARRAY); // <--------------
glBindBuffer(GL_ARRAY_BUFFER, 0);
ibo = glGenBuffers();
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indicesBuff, GL_STATIC_DRAW);
// skip glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glBindVertexArray(0);

glBindVertexArray(vao);
glDrawElements(GL_TRIANGLES, size, GL_UNSIGNED_INT, 0);
glBindVertexArray(0);
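For completeness, here is a minimal sketch of the shader-based path mentioned at the start of this answer. It is not part of the original code: the shader sources and the helper name createProgram are assumptions, and real code should also check the compile and link status. With such a program bound, the question's glVertexAttribPointer(0, ...) / glEnableVertexAttribArray(0) setup works unchanged:

private static int createProgram() {
    String vs =
        "#version 330 core\n" +
        "layout(location = 0) in vec3 position;\n" + // matches attribute index 0 used above
        "void main() { gl_Position = vec4(position, 1.0); }";
    String fs =
        "#version 330 core\n" +
        "out vec4 fragColor;\n" +
        "void main() { fragColor = vec4(1.0); }";    // plain white, like the intended square

    int v = GL20.glCreateShader(GL20.GL_VERTEX_SHADER);
    GL20.glShaderSource(v, vs);
    GL20.glCompileShader(v);
    int f = GL20.glCreateShader(GL20.GL_FRAGMENT_SHADER);
    GL20.glShaderSource(f, fs);
    GL20.glCompileShader(f);

    int program = GL20.glCreateProgram();
    GL20.glAttachShader(program, v);
    GL20.glAttachShader(program, f);
    GL20.glLinkProgram(program);
    return program;
}

Bind it once with GL20.glUseProgram(program) before entering the draw loop.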

Trying to draw a basic shape with LWJGL3

So I want to draw a shape using a VAO and VBOs, and I think I'm doing everything right, but whenever I run my code I just get the window with the clear color. I had an issue before when I tried to initialize the triangle before I called createCapabilities. Am I missing some function to start drawing?
Here is my code:
int vaoId, vboId, vertexCount;
float[] vertices = {
    // Left bottom triangle
    -0.5f, 0.5f,
    -0.5f, -0.5f,
    0.5f, -0.5f,
};
private void init() {
    if (!glfwInit()) {
        throw new IllegalStateException("Failed to Initialize GLFW!");
    }
    int width = 1000;
    int height = 1000;
    glfwWindowHint(GLFW_VISIBLE, GLFW_FALSE);
    window = glfwCreateWindow(width, height, "App", NULL, NULL);
    if (window == 0) {
        throw new IllegalStateException("Failed to create Window!");
    }
    GLFWVidMode videoMode = glfwGetVideoMode(glfwGetPrimaryMonitor());
    glfwSetWindowPos(window, (videoMode.width() - width) / 2, (videoMode.height() - height) / 2);
    // Make the OpenGL context current
    glfwMakeContextCurrent(window);
    // Enable v-sync
    glfwSwapInterval(1);
    glfwShowWindow(window);
}
private void loop() {
    // This line is critical for LWJGL's interoperation with GLFW's
    // OpenGL context, or any context that is managed externally.
    // LWJGL detects the context that is current in the current thread,
    // creates the GLCapabilities instance and makes the OpenGL
    // bindings available for use.
    GL.createCapabilities();
    initTriangle();
    // Run the rendering loop until the user has attempted to close
    // the window or has pressed the ESCAPE key.
    while (!glfwWindowShouldClose(window)) {
        glClear(GL_COLOR_BUFFER_BIT); // clear the framebuffer
        glBindVertexArray(vaoId);
        glEnableVertexAttribArray(0);
        glDrawArrays(GL_TRIANGLES, 0, vertexCount);
        glDisableVertexAttribArray(0);
        glBindVertexArray(0);
        glfwSwapBuffers(window); // swap the color buffers
        // Poll for window events. The key callback above will only be
        // invoked during this call.
        glfwPollEvents();
    }
}
private void initTriangle() {
    FloatBuffer vertBuf = MemoryUtil.memAllocFloat(vertices.length);
    vertBuf.put(vertices);
    vertBuf.flip();
    vaoId = glGenVertexArrays();
    glBindVertexArray(vaoId);
    vboId = glGenBuffers();
    glBindBuffer(GL_ARRAY_BUFFER, vboId);
    glBufferData(GL_ARRAY_BUFFER, vertBuf, GL_STATIC_DRAW);
    glVertexAttribPointer(0, 2, GL_FLOAT, false, 0, 0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindVertexArray(0);
}
I hope you guys can help, thanks a lot.
I am not using shaders. Is that my problem? And is it a necessity?
The state-of-the-art way of rendering in OpenGL would be to use a shader.
If you don't use a shader, then you have to define the array of vertex data by glVertexPointer
vaoId = glGenVertexArrays();
glBindVertexArray(vaoId);
vboId = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vboId);
glBufferData(GL_ARRAY_BUFFER, vertBuf, GL_STATIC_DRAW);
glVertexPointer(2, GL_FLOAT, 0, 0); // <---------------
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
and you have to enable the client-side capability for vertex coordinates by glEnableClientState( GL_VERTEX_ARRAY ):
glBindVertexArray(vaoId);
glEnableClientState(GL_VERTEX_ARRAY); // <---------------
glDrawArrays(GL_TRIANGLES, 0, vertexCount);
glDisableClientState(GL_VERTEX_ARRAY); // <---------------
glBindVertexArray(0);
Note, this state of the client-side capability (or vertex attribute array) is stored in the Vertex Array Object.
So it is sufficient to enable the vertex coordinates when the vertex array object is specified. The enabling and disabling of the vertex coordinates when drawing the geometry can then be omitted:
vaoId = glGenVertexArrays();
glBindVertexArray(vaoId);
vboId = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vboId);
glBufferData(GL_ARRAY_BUFFER, vertBuf, GL_STATIC_DRAW);
glVertexPointer(2, GL_FLOAT, 0, 0);
glEnableClientState(GL_VERTEX_ARRAY); // <---------------
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);

glBindVertexArray(vaoId);
glDrawArrays(GL_TRIANGLES, 0, vertexCount);
glBindVertexArray(0);

Trying to draw a model with Gouraud shading in JOGL for desktop, but it is flat

I already have a drawn model, but it has flat shading (from what I understand it should be smooth by default...).
This is the initial config:
private void SetLightningAndMaterials(){
    //float[] lightPos = {1, 1, 1, 0};
    float[] lightPos = {0, 0, 1, 0};
    float[] lightColorDiffuse = {1, 1, 1, 1};
    float[] lightColorAmbient = {0.2f, 0.2f, 0.2f, 1};

    gl.glShadeModel(GL.GL_SMOOTH);
    gl.glLightfv(GL.GL_LIGHT1, GL.GL_POSITION, lightPos, 0);
    gl.glLightfv(GL.GL_LIGHT1, GL.GL_DIFFUSE, lightColorDiffuse, 0);
    gl.glLightfv(GL.GL_LIGHT1, GL.GL_AMBIENT, lightColorAmbient, 0);
    gl.glEnable(GL.GL_LIGHT1);
    gl.glEnable(GL.GL_LIGHTING);

    gl.glMaterialfv(GL.GL_FRONT, GL.GL_AMBIENT, ambientColour, 0);
    gl.glMaterialfv(GL.GL_FRONT, GL.GL_DIFFUSE, mesh.colour, 0);

    gl.glEnable(GL.GL_LIGHTING);
    gl.glEnable(GL.GL_LIGHT0);
    float[] noAmbient = { 0.1f, 0.1f, 0.1f, 1f }; // low ambient light
    float[] spec = { 1f, 0.6f, 0f, 1f };          // specular colour
    float[] diffuse = { 0.5f, 0.5f, 0.5f, 1f };
    gl.glLightfv(GL.GL_LIGHT0, GL.GL_AMBIENT, noAmbient, 0);
    gl.glLightfv(GL.GL_LIGHT0, GL.GL_SPECULAR, spec, 0);
    gl.glLightfv(GL.GL_LIGHT0, GL.GL_DIFFUSE, diffuse, 0);
    gl.glLightfv(GL.GL_LIGHT0, GL.GL_POSITION, new float[]{0, 0, 10, 1}, 0);
}
And this is how I draw the model:
public void Draw(GL gl, GLU glu){
    Vec3d normal;
    MassPoint vertex1, vertex2, vertex3;
    int faceIndex = 0;
    Face surfaceFace;
    for (faceIndex = 0; faceIndex < surfaceFaces.size(); faceIndex++){
        surfaceFace = surfaceFaces.get(faceIndex);
        surfaceFace.recalculateNormal();
        vertex1 = surfaceFace.vertex1;
        vertex2 = surfaceFace.vertex2;
        vertex3 = surfaceFace.vertex3;
        normal = surfaceFace.normal;

        gl.glBegin(gl.GL_TRIANGLES);
        gl.glNormal3d(normal.x, normal.y, normal.z);
        gl.glMaterialfv(GL.GL_FRONT, GL.GL_DIFFUSE, colour, 0);
        gl.glVertex3d(vertex1.position.x, vertex1.position.y, vertex1.position.z);
        gl.glVertex3d(vertex2.position.x, vertex2.position.y, vertex2.position.z);
        gl.glVertex3d(vertex3.position.x, vertex3.position.y, vertex3.position.z);
        gl.glEnd();
    }
}
I want to believe there's an easy way of solving this without having to create a shader (I don't have any idea how to set these up in Java).
I'm using JOGL 1 by the way, and it is probably an old version (the imports are like javax.media.opengl.*).
I managed to solve the problem. For smoothness to work, the drawing expects three normals (one per vertex); I was only passing one normal (one per face).
Here's the new code for the drawing:
public void Draw(GL gl, GLU glu) {
    Vec3d[] normalsPerVertex = new Vec3d[3];
    MassPoint vertex1, vertex2, vertex3;
    int faceIndex = 0;
    Face surfaceFace;
    for (faceIndex = 0; faceIndex < surfaceFaces.size(); faceIndex++){
        surfaceFace = surfaceFaces.get(faceIndex);
        vertex1 = surfaceFace.vertex1;
        normalsPerVertex[0] = vertex1.CalcNormal();
        vertex2 = surfaceFace.vertex2;
        normalsPerVertex[1] = vertex2.CalcNormal();
        vertex3 = surfaceFace.vertex3;
        normalsPerVertex[2] = vertex3.CalcNormal();

        gl.glBegin(GL.GL_TRIANGLES);
        gl.glNormal3d(normalsPerVertex[0].x, normalsPerVertex[0].y, normalsPerVertex[0].z);
        gl.glVertex3d(vertex1.position.x, vertex1.position.y, vertex1.position.z);
        gl.glNormal3d(normalsPerVertex[1].x, normalsPerVertex[1].y, normalsPerVertex[1].z);
        gl.glVertex3d(vertex2.position.x, vertex2.position.y, vertex2.position.z);
        gl.glNormal3d(normalsPerVertex[2].x, normalsPerVertex[2].y, normalsPerVertex[2].z);
        gl.glVertex3d(vertex3.position.x, vertex3.position.y, vertex3.position.z);
        gl.glEnd();
    }
}
The calculated normal for each vertex is the average of the normals of all the faces connected to that vertex. Here's the code for that:
public Vec3d CalcNormal() {
    Vec3d normalMedia = new Vec3d();
    for (Face face : facesRelated) {
        face.recalculateNormal();
        normalMedia.add(face.normal);
    }
    normalMedia.mul(1d / facesRelated.size());
    return normalMedia;
}
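One caveat, not from the original answer: the averaged normal is generally no longer unit length, and fixed-function lighting assumes unit normals. Either normalize the result in CalcNormal (assuming Vec3d offers such a helper), or let OpenGL renormalize with one line in the existing setup (e.g. in SetLightningAndMaterials):

gl.glEnable(GL.GL_NORMALIZE); // fixed pipeline renormalizes normals before lighting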
Hope this helps someone else.

Vertex Attribute Arrays not receiving vertex data in LWJGL

Cannot seem to get vertex attribute arrays working properly for per-vertex data.
Here's the SSCCE:
private static void createDisplay(int w, int h) {
    try {
        Display.create();
        Display.setDisplayMode(new DisplayMode(w, h));
    }
    catch (LWJGLException e) {
        e.printStackTrace();
    }
    float size = 1;
    float aspect = (float) Display.getWidth() / Display.getHeight();
    GL11.glMatrixMode(GL11.GL_PROJECTION);
    GL11.glLoadIdentity();
    GL11.glOrtho(-size * aspect, size * aspect, -size, size, -1, 1);
    GL11.glMatrixMode(GL11.GL_MODELVIEW);
    GL11.glLoadIdentity();
}

public static void main(String[] args) {
    createDisplay(1200, 800);
    GL11.glViewport(0, 0, Display.getWidth(), Display.getHeight());
    ShaderManager.createShader("2Dv", new File("src/Shaders/2D.vert"), SHADER_VERT);
    ShaderManager.createShader("2Df", new File("src/Shaders/2D.frag"), SHADER_FRAG);
    ShaderManager.createProgram("2D", "2Dv", "2Df");
    // Shader compiles and links correctly.
    ShaderManager.useProgram("2D");
    // Calls glUseProgram(programID);
    float[] vertexData = new float[] {-0.5f, -0.5f, 0.5f, -0.5f, 0.5f, 0.5f, -0.5f, 0.5f};
    int vao = GL30.glGenVertexArrays();
    if (vao == 0)
        System.exit(-1);
    GL30.glBindVertexArray(vao);
    int vertexBuffer = GL15.glGenBuffers();
    GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vertexBuffer);
    GL15.glBufferData(GL15.GL_ARRAY_BUFFER, BufferUtil.asDirectFloatBuffer(vertexData), GL15.GL_DYNAMIC_DRAW);
    // GL11.glEnableClientState(GL11.GL_VERTEX_ARRAY);
    // GL11.glVertexPointer(2, GL11.GL_FLOAT, 2 * 4, 0);
    int loc = ShaderManager.currentProgram.getAttribute("vertex");
    if (loc == -1)
        Debug.log(Debug.INSTANCE_MANAGEMENT, "Attribute [", "", "] not found in Shader [",
                ShaderManager.currentProgram.toString(), "].");
    else {
        GL20.glVertexAttribPointer(loc, 2, GLCONST.TYPE_FLOAT, false, 2 * 4, 0);
        GL20.glEnableVertexAttribArray(loc);
    }
    GL30.glBindVertexArray(0);
    GL11.glColor3f(1, 0, 0);
    GL11.glClearColor(0.5f, 0.5f, 0.8f, 1);
    int indexBuffer = GL15.glGenBuffers();
    GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, indexBuffer);
    GL15.glBufferData(GL15.GL_ELEMENT_ARRAY_BUFFER, BufferUtil.asDirectFloatBuffer(new float[] {0, 1, 2, 3}),
            GL15.GL_DYNAMIC_DRAW);
    while (!Display.isCloseRequested()) {
        GL11.glClear(GL11.GL_COLOR_BUFFER_BIT);
        GL30.glBindVertexArray(vao);
        GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, indexBuffer);
        GL11.glDrawElements(GL11.GL_QUADS, 4, GL11.GL_UNSIGNED_INT, 0);
        GL30.glBindVertexArray(0);
        Display.update();
        int error = GL11.glGetError();
        if (error != GL11.GL_NO_ERROR)
            System.out.println(GLU.gluErrorString(error));
    }
}
The problem lies in the usage of vertex attribute arrays. The old code I used was:
GL11.glEnableClientState(GL11.GL_VERTEX_ARRAY);
GL11.glVertexPointer(2, GL11.GL_FLOAT, 2 * 4, 0);
The new version is:
int loc = ShaderManager.currentProgram.getAttribute("vertex"); // Call to glGetAttribLocation();
if (loc == -1){
    System.exit(-1);
}
GL20.glVertexAttribPointer(loc, 2, GL11.GL_FLOAT, false, 2 * 4, 0);
GL20.glEnableVertexAttribArray(loc);
The original code was the two commented lines. Upon running, this correctly sent vertex data to gl_Vertex and rendered a square of size 1.
The new code should send vertex data to the vertex attribute, but it gets nothing.
When the original code is uncommented and both old and new code are used, both gl_Vertex and the vertex attribute get vertex data.
What is going wrong here?
So I figured out the problem after a while. The problem is due to an AMD driver bug when using an OpenGL 3.0+ core profile.
The "vertex" attribute array was assigned a location of 1.
The bug occurs when attribute array 0 is unused: nothing is rendered if array 0 is not enabled.
To fix this problem I simply explicitly assigned "vertex" to location 0:
layout(location = 0) in vec4 vertex;
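If the layout qualifier is not available (it requires GLSL 3.30 or the ARB_explicit_attrib_location extension), a sketch of the alternative: the location can also be forced from the Java side with glBindAttribLocation, which must be called before the program is linked. The handle names here are hypothetical:

int program = GL20.glCreateProgram();            // hypothetical program handle
GL20.glAttachShader(program, vertexShader);      // hypothetical shader handles
GL20.glAttachShader(program, fragmentShader);
GL20.glBindAttribLocation(program, 0, "vertex"); // force "vertex" to location 0
GL20.glLinkProgram(program);                     // the binding takes effect at link time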
