In LWJGL 3.2.1 I'm drawing a triangle using vertices and viewing it with a perspective (frustum) projection. I'm trying to use the triangle to orientate myself in 3D space to figure out where I am and how to direct myself. When I rotate the triangle, it gets smaller instead of rotating around the set point.
// One-time projection setup.
// NOTE(review): glFrustum with near = 0 is invalid — both near and far must be
// positive; a zero near plane produces a degenerate matrix. Passing the window
// size in pixels as the left/right/bottom/top arguments also creates an
// extremely wide view volume (glFrustum expects near-plane coordinates, not pixels).
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glFrustum(0, windowWidth, windowHeight, 0, 0, 100);
glMatrixMode(GL_MODELVIEW);
while (!glfwWindowShouldClose(window))
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glPushMatrix(); // pushes the MODELVIEW stack (the current mode here) ...
glMatrixMode(GL_PROJECTION);
// BUG: the rotation axis (0, 0, 0) is a zero vector — the result is undefined.
// BUG: this rotates the PROJECTION matrix instead of MODELVIEW, which distorts
// the view volume — that is why the triangle appears to shrink rather than
// rotate. The projection matrix is also never pushed/popped, so the (undefined)
// rotation accumulates by 5 degrees every frame.
glRotatef(5f, 0f, 0f, 0f);
glMatrixMode(GL_MODELVIEW);
glBegin(GL_POLYGON);
glVertex3f(-0f, -0f, 0f);
glVertex3f(0f, 1f, 0f);
glVertex3f(1f, 1f, 0f);
glEnd();
glPopMatrix(); // ... and this pops MODELVIEW, leaving the projection change in place
glfwSwapBuffers(window);
glfwPollEvents();
}
Am I rotating it incorrectly or something?
Related
I have a method drawTriangle which is referenced in the display() method of an OpenGL program in JAVA.
/**
 * Emits the three vertices of a triangle via glVertex2d.
 * Must be called between glBegin(...) and glEnd(); it issues no matrix or
 * state changes of its own.
 */
public void drawTriangle(GL gl, int x1, int y1, int x2, int y2, int x3, int y3){
    int[][] corners = {{x1, y1}, {x2, y2}, {x3, y3}};
    for (int[] c : corners) {
        gl.glVertex2d(c[0], c[1]);
    }
}
I then call the triangle in Display()
// Renders a triangle. NOTE(review): several ordering bugs here —
// glPushMatrix/glPopMatrix, glTranslatef and glRotatef are all issued INSIDE
// the glBegin block, where matrix calls are invalid (they raise
// GL_INVALID_OPERATION and are ignored); the transforms are also issued AFTER
// the vertices, so they could never affect this draw anyway; and there is no
// matching glEnd() to close the glBegin.
public void display(GLAutoDrawable drawable){
GL gl = drawable.getGL();
gl.glClear(GL.GL_COLOR_BUFFER_BIT);
gl.glColor3f(0.4f, 1.0f, 0.4f); // light green (R=0.4, G=1.0, B=0.4)
gl.glBegin(GL.GL_TRIANGLES);
gl.glPushMatrix();               // invalid inside glBegin/glEnd
drawTriangle(gl,0,0,0,20,100,10);
gl.glTranslatef(0f, 0f, 0f);     // identity translate, and too late to affect the draw
gl.glRotatef(90f, 0f, 0f, 1f);   // too late to affect the draw
gl.glPopMatrix();                // invalid inside glBegin/glEnd
}
This then draws a lovely red Triangle.
I can't change the position or rotation of the triangle.
It just spawns it here:
There are a few problems with the code. First of all, gl.glBegin(...) should be followed by gl.glEnd(). Keep the drawing functions, such as, drawTriangle(...) within these two, and any matrix operations outside.
// Corrected version: the transforms are applied BEFORE the geometry is drawn,
// and the glBegin/glEnd pair encloses only vertex-emitting calls.
public void display(GLAutoDrawable drawable)
{
GL gl = drawable.getGL();
gl.glClear(GL.GL_COLOR_BUFFER_BIT);
gl.glColor3f(0.4f, 1.0f, 0.4f);
gl.glPushMatrix();             // save the current modelview matrix
gl.glTranslatef(0f, 0f, 0f);   // identity translate (placeholder for a real offset)
gl.glRotatef(90f, 0f, 0f, 1f); // rotate 90 degrees about the Z axis
gl.glBegin(GL.GL_TRIANGLES);
drawTriangle(gl,0,0,0,20,100,10);
gl.glEnd();                    // close the primitive batch
gl.glPopMatrix();              // restore the saved matrix
}
You're applying the transformation after rendering. This does not work!
first set up your matrices (i.e. do the transformation)
then render
finally popMatrix to restore the original matrix
i.e.:
// Same fix, spelled out: (1) set up the transformation, (2) render,
// (3) glPopMatrix to restore the original matrix.
public void display(GLAutoDrawable drawable)
{
GL gl = drawable.getGL();
gl.glClear(GL.GL_COLOR_BUFFER_BIT);
gl.glPushMatrix();             // save the current modelview matrix
gl.glTranslatef(0f, 0f, 0f);   // identity translate (placeholder)
gl.glRotatef(90f, 0f, 0f, 1f); // rotate 90 degrees about the Z axis
gl.glBegin(GL.GL_TRIANGLES);
gl.glColor3f(0.4f, 1.0f, 0.4f); // color may be set inside glBegin/glEnd
drawTriangle(gl,0,0,0,20,100,10);
gl.glEnd();
gl.glPopMatrix();              // restore the saved matrix
}
My example rotates a quad, I assume that you're able to modify it to rotate a triangle:
https://gist.github.com/gouessej/3420e2b6f632efdddf98
It's quoted on Wikipedia and in the JogAmp wiki. Improve your Yacy/DuckDuckGo/IxQuick/Seeks skills next time ;)
I have been trying to render a scene into a framebuffer that is 1/3 of the screen resolution, then draw the result onto a quad the size of the screen. What am I doing wrong?
// Creates an FBO with a color texture and a depth renderbuffer, both sized
// (WIDTH / SCALE) x (HEIGHT / SCALE) — the off-screen target is 1/SCALE of
// the window in each dimension.
public void initGL() {
frameBufferID = glGenFramebuffersEXT();
colorBufferID = glGenTextures();
depthBufferID = glGenRenderbuffersEXT();
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, frameBufferID);
glBindTexture(GL_TEXTURE_2D, colorBufferID);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
// Allocates texture storage only (data pointer is null, so no pixel transfer
// occurs). NOTE(review): GL_INT is an unusual transfer type for an RGBA8
// texture — GL_UNSIGNED_BYTE is the conventional choice; confirm if intended.
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, WIDTH / SCALE, HEIGHT / SCALE, 0, GL_RGBA, GL_INT, (java.nio.ByteBuffer)null);
glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, colorBufferID, 0);
glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, depthBufferID);
glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL14.GL_DEPTH_COMPONENT24, WIDTH / SCALE, HEIGHT / SCALE);
glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, depthBufferID);
// NOTE(review): consider verifying completeness with
// glCheckFramebufferStatusEXT(GL_FRAMEBUFFER_EXT) before unbinding.
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
}
// Pass 1: render the scene into the FBO with a perspective projection.
// Pass 2: draw the FBO's color texture onto a fullscreen quad in orthographic.
public void render() {
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
// Perspective frustum: visible geometry lies at eye-space z in [-0.001, -1000].
GLU.gluPerspective(90.0f, WIDTH/(float)HEIGHT, 0.001f, 1000.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glViewport(0, 0, WIDTH / SCALE, HEIGHT / SCALE);
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, frameBufferID);
glClearColor(1.0f, 0.0f, 0.0f, 0.5f); // NOTE(review): red, same as the 2nd pass —
                                      // use distinct clear colors for debugging
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glColor3f(1, 1, 1);
// Render game code here
// BUG: these vertices are at z = 0, which is OUTSIDE the perspective frustum
// (geometry must be between z = -near and z = -far in eye space), so the line
// is clipped away and never reaches the texture.
glBegin(GL_LINES);
{
glVertex2f(0, 0);
glVertex2f(1, 1);
}
glEnd();
// Switch to an orthographic projection for the fullscreen quad,
// saving both matrices so they can be restored afterwards.
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadIdentity();
glOrtho(0, WIDTH, 0, HEIGHT, 0.001f, 1000.0f);
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glLoadIdentity();
glEnable(GL_TEXTURE_2D);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0); // back to the default framebuffer
glBindTexture(GL_TEXTURE_2D, colorBufferID); // sample the FBO's color attachment
glClearColor(1.0f, 0.0f, 0.0f, 1.0f);        // red again — red quad on red background
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glViewport(0, 0, WIDTH, HEIGHT);
glTranslatef(0, 0, -1); // push the quad inside the ortho depth range [0.001, 1000]
//Draw Quad
glBegin(GL_QUADS);
{
glTexCoord2f(0, 0);
glVertex2f(0, 0);
glTexCoord2f(1, 0);
glVertex2f(WIDTH, 0);
glTexCoord2f(1, 1);
glVertex2f(WIDTH, HEIGHT);
glTexCoord2f(0, 1);
glVertex2f(0, HEIGHT);
}
glEnd();
glPopMatrix();
glMatrixMode(GL_PROJECTION);
glPopMatrix();
glMatrixMode(GL_MODELVIEW);
}
I'm trying to render the things in the VBO in Perspective, and then draw the fullscreen Quad in Orthographic. But everything I've tried doesn't work... Like at all. Is there anything I've screwed up?
You are rendering a quad with a completely red texture in front of a red background, so there is not much to see.
The lines you are trying to render into the texture are actually outside of the frustum, as you draw in the z=0 plane, while your frustum is in the range z=[-0.001,-1000].
For debugging purposes, I recommend to change the clear color between your two draw passes, so that you can better see which of the two passes fails.
For some reason I cannot, for the life of me, figure out what I'm doing wrong here. I am trying to render a 2D orthographic HUD on top of my 3D scene, which is rendered via shaders rather than fixed-function draw calls. I've spent almost 8 hours on this — please help. There are no errors in the console!
// Draw a 2D textured HUD quad over the 3D scene, then restore the 3D state.
EngineWindow.bindAsRenderTarget(); //Set render target to display.
glDepthMask(false);
glDisable(GL_DEPTH_TEST);
glDisable(GL_CULL_FACE);
glDisable(GL_DEPTH_CLAMP);
glEnable(GL_BLEND);
// FIX: glEnable(GL_BLEND) alone leaves the default factors (GL_ONE, GL_ZERO),
// which is effectively no blending; an explicit blend function is required for
// the texture's alpha channel to have any effect.
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_TEXTURE_2D);
glViewport(0, 0, width, height);
// FIX: save BOTH matrices and restore them afterwards. The original called
// glPushMatrix before selecting a matrix mode, so the projection matrix set up
// below was clobbered and never restored for the 3D pass.
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadIdentity();
glOrtho(0, width, height, 0, -1, 1); // pixel coordinates, origin at top-left
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glLoadIdentity();
System.out.println(this.x + " " + this.y + " " + this.width + " " + this.height); //Its at the right position too.
GL13.glActiveTexture(GL13.GL_TEXTURE0);
texture.bind(); //Binds blue texture with writing.
glBegin(GL_QUADS);
// FIX: glTexCoord2f sets the CURRENT texture coordinate, which is captured by
// the NEXT glVertex call — so it must come BEFORE each vertex. The original
// issued it after, giving every vertex a stale coordinate.
glTexCoord2f(0f, 0f);
glVertex2i(x, y);
glTexCoord2f(0f, 1f);
glVertex2i(x, y+height);
glTexCoord2f(1f, 1f);
glVertex2i(x+width, y+height);
glTexCoord2f(1f, 0f);
glVertex2i(x+width, y);
glEnd();
// Restore the matrices saved above.
glMatrixMode(GL_MODELVIEW);
glPopMatrix();
glMatrixMode(GL_PROJECTION);
glPopMatrix();
glDepthFunc(GL_LESS);
glDepthMask(true);
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
glDisable(GL_BLEND);
glEnable(GL_DEPTH_CLAMP);
I was creating a triangle in LWJGL OpenGL, but it doesn't display the square and the triangle that I specify. I am stuck and can't seem to make it work; I am new to OpenGL and LWJGL. Why is nothing being drawn on the screen?
// Constructor: creates the display and runs the render loop.
// NOTE(review): nothing is visible for two reasons, both fixed in the answer
// below — (1) the loop never calls glClear or glLoadIdentity, so the
// glTranslatef calls accumulate every frame and quickly move the geometry out
// of view; (2) the projection matrix is left as identity, so vertices pushed
// to z = -8 fall outside the default clip volume.
public Cube3D() {
try {
Display.setDisplayMode(new DisplayMode(640,480));
Display.setTitle("Gaming");
Display.create();
} catch (LWJGLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
//initiallized code OPENGL
glShadeModel(GL_SMOOTH);
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClearDepth(1.0);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
glViewport(0, 0, 640, 480);
glMatrixMode(GL_PROJECTION);
glLoadIdentity(); // BUG: identity projection — no perspective is ever set up
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
while(!Display.isCloseRequested()) { //Reset The View
// BUG: without a per-frame glLoadIdentity this translation accumulates
// across iterations instead of resetting each frame.
glTranslatef(-1.5f,0.0f,-8.0f); // Move Left 1.5 Units And Into
// The Screen 8 (not 6.0 like
// VC../ not sure why)
glBegin(GL_TRIANGLES); // Drawing Using Triangles
glVertex3f( 0.0f, 1.0f, 0.0f); // Top
glVertex3f(-1.0f,-1.0f, 0.0f); // Bottom Left
glVertex3f( 1.0f,-1.0f, 0.0f); // Bottom Right
glEnd(); // Finished Drawing The Triangle
glTranslatef(3.0f,0.0f,0.0f); // Move Right 3 Units
glBegin(GL_QUADS); // Draw A Quad
glVertex3f(-1.0f, 1.0f, 0.0f); // Top Left
glVertex3f( 1.0f, 1.0f, 0.0f); // Top Right
glVertex3f( 1.0f,-1.0f, 0.0f); // Bottom Right
glVertex3f(-1.0f,-1.0f, 0.0f); // Bottom Left
glEnd();
// BUG: the color and depth buffers are never cleared inside the loop.
Display.update();
Display.sync(60);
}
Display.destroy();
}
}
It took a little bit of time, but I managed to find the problem. The first issue is how you initialize OpenGL.
Replace:
glShadeModel(GL_SMOOTH);
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClearDepth(1.0);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
glViewport(0, 0, 640, 480);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
with:
// Replacement initialization: a proper 45-degree perspective projection plus
// depth testing, so geometry at negative z is actually inside the view volume.
glMatrixMode(GL11.GL_PROJECTION);
glLoadIdentity();
GLU.gluPerspective(45.0f, ((float) 800) / ((float) 600), 0.1f, 100.0f); // aspect = width / height
glMatrixMode(GL11.GL_MODELVIEW);
glLoadIdentity();
glEnable(GL11.GL_DEPTH_TEST);
replacing 800 with the width of the window and 600 with the height should you change the resolution.
and in your while loop put these two lines at the beginning:
// Per-frame reset: clear both buffers and reload the identity modelview so
// the glTranslatef calls do not accumulate across loop iterations.
GL11.glClear(GL11.GL_COLOR_BUFFER_BIT | GL11.GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
I'm trying to draw a gradient and then draw a transparent texture ontop of it.
This is the code I'm using right now:
GL11.glClearColor(0.0F, 0.0F, 0.0F, 0.0F);
// Draw the gradient (blending is disabled here, so the vertex alpha of 0 has no effect)
GL11.glBegin(GL11.GL_QUADS);
GL11.glTexCoord2f(0.0f, 0.0f);
GL11.glColor4f(1F, 1F, 1F, 0F);
GL11.glVertex3f(0, 0, 0.0f);
GL11.glTexCoord2f(1.0f, 0.0f);
GL11.glColor4f(0F, 1F, 1F, 0F);
GL11.glVertex3f(0 + gameWidth, 0, 0.0f);
GL11.glTexCoord2f(1.0f, 1.0f);
GL11.glColor4f(0F, 0F, 1F, 0F);
GL11.glVertex3f(0 + gameWidth, 0 + gameHeight, 0.0f);
GL11.glTexCoord2f(0.0f, 1.0f);
GL11.glColor4f(1F, 0F, 1F, 0F);
GL11.glVertex3f(0, 0 + gameHeight, 0.0f);
GL11.glEnd();
// FIX: the current color modulates the texture (default GL_MODULATE env), so
// an alpha of 0 here forced every textured fragment's alpha to 0; with the
// GL_ONE source factor that made the draw purely additive and washed the quad
// out to white. Use alpha = 1 so the texture's own alpha is preserved.
GL11.glColor4f(1F, 1F, 1F, 1F);
GL11.glEnable(GL11.GL_BLEND);
// FIX: a GL_ONE source factor is only correct for premultiplied-alpha
// textures; a normally loaded PNG has straight alpha and needs GL_SRC_ALPHA.
GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
TexLoader.loadTex("/example.png"); // Loads and binds the texture, also enables GL_TEXTURE_2D
GL11.glBegin(GL11.GL_QUADS);
GL11.glTexCoord2f(0.0f, 0.0f);
GL11.glVertex3f(parX, parY, 0.0f);
GL11.glTexCoord2f(1.0f, 0.0f);
GL11.glVertex3f(parX + parWidth, parY, 0.0f);
GL11.glTexCoord2f(1.0f, 1.0f);
GL11.glVertex3f(parX + parWidth, parY + parHeight, 0.0f);
GL11.glTexCoord2f(0.0f, 1.0f);
GL11.glVertex3f(parX, parY + parHeight, 0.0f);
GL11.glEnd();
GL11.glDisable(GL11.GL_TEXTURE_2D);
GL11.glDisable(GL11.GL_BLEND);
However, instead of drawing the texture and hiding the transparent pixels it just draws a white quad.
What am I doing wrong?