How to draw textures and use VBOs? - java

Currently I have a working system of VBOs with two buffers - one for positions and one for colors. However, I want to draw textures instead of colors. How can I draw textures over my VBO position buffer? This is in Java with LWJGL, but an example in any language helps.

I want to add textures to my VBO. How would I go about doing that?
You don't. At least, you're not adding texture images to the VBO. What you add is a new attribute, called the texture coordinate, that assigns each vertex a location in the texture image.
The texture itself is an independent object, created using glGenTextures, glBindTexture and glTexImage….

Here is a simple code snippet to initialize and render a cube with a texture. Uses Slick2D library.
int vertexHandle;
int textureHandle;
Texture texture; // org.newdawn.slick.opengl.Texture from Slick2D
private void init() throws IOException {
Camera.init();
texture = TextureLoader.getTexture("PNG", ResourceLoader.getResourceAsStream("test.png"));
this.initialize3D();
vertexHandle = GL15.glGenBuffers();
FloatBuffer positionData = BufferUtils.createFloatBuffer(72);
// Initialize position data (24 cube vertices * 3 floats = 72).
positionData.flip();
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER,vertexHandle);
GL15.glBufferData(GL15.GL_ARRAY_BUFFER,positionData,GL15.GL_STATIC_DRAW);
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER,0);
FloatBuffer textureData = BufferUtils.createFloatBuffer(72);
// Initialize texture data.
textureData.flip();
textureHandle = GL15.glGenBuffers();
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, textureHandle);
GL15.glBufferData(GL15.GL_ARRAY_BUFFER, textureData, GL15.GL_STATIC_DRAW);
GL11.glTexCoordPointer(3, GL11.GL_FLOAT, 0, 0);
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
// GL_TEXTURE_COORD_ARRAY stays enabled (see initialize3D); render() relies on it.
}
public void render() {
GL11.glClear(GL11.GL_COLOR_BUFFER_BIT | GL11.GL_DEPTH_BUFFER_BIT);
GL11.glLoadIdentity();
texture.bind();
GL11.glPushMatrix();
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vertexHandle);
GL11.glVertexPointer(3, GL11.GL_FLOAT, 0, 0L);
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, textureHandle);
GL11.glTexCoordPointer(3, GL11.GL_FLOAT, 0, 0L); // Texture coordinates come from the texture VBO.
GL11.glDrawArrays(GL11.GL_QUADS, 0, 24);
GL11.glPopMatrix();
}
In addition you may need to integrate parts of this method to initialize 3D rendering:
public void initialize3D() {
GL11.glEnable(GL11.GL_TEXTURE_2D); // Enable 2D texturing.
GL11.glShadeModel(GL11.GL_SMOOTH); // Smooth (Gouraud) shading.
//GL11.glClearColor(0.4f,0.6f,1.0f,0.0f); // BG color. 6698FF
GL11.glClearDepth(1.0); // Depth buffer clear value.
GL11.glEnable(GL11.GL_DEPTH_TEST); // Depth testing so nearer objects cover farther ones.
GL11.glDepthFunc(GL11.GL_LEQUAL); // Depth comparison function.
GL11.glEnableClientState(GL11.GL_VERTEX_ARRAY);
//GL11.glEnableClientState(GL11.GL_COLOR_ARRAY);
GL11.glEnableClientState(GL11.GL_TEXTURE_COORD_ARRAY);
GL11.glMatrixMode(GL11.GL_PROJECTION); // Switch to the projection matrix.
GL11.glLoadIdentity(); // Reset it to identity.
// Default perspective; near plane 0.1, far plane 300.
GLU.gluPerspective(45.0f,(float)Display.getWidth()/(float)Display.getHeight(),0.1f,300.0f);
GL11.glMatrixMode(GL11.GL_MODELVIEW); // Switch back to the modelview matrix for placing objects.
GL11.glHint(GL11.GL_PERSPECTIVE_CORRECTION_HINT,GL11.GL_NICEST); // Ask for the nicest perspective correction.
}

Related

OpenGL rendering to texture produces empty Texture

I am trying to render an ortho projection of my scene's depth values to a texture, in order to use that texture in a later render cycle to determine which fragments are in shadow. Basically a shadow map.
However, the texture that I am rendering to ends up uniformly empty. Given that I can only really test it in a shader, I am limited in what output I can generate, but it seems that all my z values in the texture are 0.
Here is the code that generates the texture (width and height are 1024 and pixelFormat is GL_DEPTH_COMPONENT):
this.id = glGenTextures();
glBindTexture(GL_TEXTURE_2D, id);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, width, height, 0, pixelFormat, GL_FLOAT, (ByteBuffer) null);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
return id;
Here I create the FrameBuffer and attach the Texture:
// Create a FBO to render the depth
this.depthMapFBO = glGenFramebuffers();
// Create the depth map texture
this.depthMap = new Texture(SHADOW_MAP_WIDTH, SHADOW_MAP_HEIGHT, GL_DEPTH_COMPONENT);
// Attach the depth map texture to the FBO
glBindFramebuffer(GL_FRAMEBUFFER, depthMapFBO);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, this.depthMap.getId(), 0);
// Set only depth
glDrawBuffer(GL_NONE);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
throw new Exception("Could not create FrameBuffer: " + glCheckFramebufferStatus(GL_FRAMEBUFFER));
}
// Unbind
glBindFramebuffer(GL_FRAMEBUFFER, 0);
Before I render my scene, I call this function to render the depth to the texture:
if(shaderMap.containsKey("shadow")){
shaderprogram = shaderMap.get("shadow");
}
shaderprogram.bind();
Sun sun = resourceManager.getSun();
Matrix4f LightViewMatrix = transformation.getLightViewMatrix(sun);
Matrix4f modelLightViewMatrix = transformation.getModelViewMatrix(object, LightViewMatrix);
shaderprogram.setUniform("modelLightViewMatrix",modelLightViewMatrix);
glBindFramebuffer(GL_FRAMEBUFFER,this.shadowmap.getDepthMapFBO());
glViewport(0, 0, 1024, 1024);
glClear(GL_DEPTH_BUFFER_BIT);
glEnable(GL_TEXTURE_2D);
this.shadowmap.getDepthMapTexture().bind();
glPolygonMode( GL_FRONT_AND_BACK, GL_FILL );
glBindVertexArray(object.getMesh().getVaoId());
glEnableVertexAttribArray(0);//Vertex positions
glEnableVertexAttribArray(1);//Color Positions
glEnableVertexAttribArray(2);//Normals
glDrawElements(GL_TRIANGLES, object.getMesh().getVertexcount(),GL_UNSIGNED_INT ,0);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glDisableVertexAttribArray(2);
glBindVertexArray(0);
glBindTexture(GL_TEXTURE_2D,0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
shaderprogram.unbind();
I can post the matrices for the OrthogonalViewMatrix and LightViewMatrix if needed, but I did test them by rendering my scene with them, and they give the desired effect of the camera flying over the terrain and looking at the center of the map, basically how you would imagine the scene to look if the camera were the sun. So I don't think there is anything wrong with them.
This is my second render pass, with the normal projection, i.e. the regular camera:
shaderprogram.createUniform("shadowMap");
glActiveTexture(GL_TEXTURE4);
this.shadowmap.getDepthMapTexture().bind();
shaderprogram.setUniform("shadowMap", 4);
glPolygonMode( GL_FRONT_AND_BACK, GL_FILL );
glBindVertexArray(object.getMesh().getVaoId());
glEnableVertexAttribArray(0);//Vertex positions
glEnableVertexAttribArray(1);//Color Positions
glEnableVertexAttribArray(2);//Normals
glDrawElements(GL_TRIANGLES, object.getMesh().getVertexcount(),GL_UNSIGNED_INT ,0);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glDisableVertexAttribArray(2);
glBindVertexArray(0);
glBindTexture(GL_TEXTURE_2D,0);
shaderprogram.unbind();
Some parts are left out, but I think these are the most important parts of the code where the error might be.
Here are the vertex and fragment shaders used in the first render cycle for the shadow map:
#version 330
layout (location=0) in vec3 position;
layout (location=1) in vec2 texCoord;
layout (location=2) in vec3 vertexNormal;
uniform mat4 modelLightViewMatrix;
uniform mat4 orthoProjectionMatrix;
void main()
{
gl_Position = orthoProjectionMatrix * modelLightViewMatrix * vec4(position, 1.0f);
}
I know I am not using the texCoords and the vertexNormal.
Here is the fragment shader:
#version 330
void main()
{
gl_FragDepth = gl_FragCoord.z;
}
It should just write the fragment's depth value.
And here is the relevant part of the normal scene's fragment shader:
float shadowfactor = 0;
vec3 projCoords = mlightviewVertexPos.xyz;
projCoords = projCoords * 0.5 + 0.5;
if (projCoords.z < texture(shadowMap,projCoords.xy).r){
// Current fragment is not in shade
shadowfactor = 1;
}else{
shadowfactor = 0.5;
}
color = color * (vec4(1,1,1,1)* shadowfactor);
fragColor = color;
I'm passing in the ortho matrix and the LightViewMatrix to determine where the fragment would be from the sun's point of view, and checking the Z value at that position in the texture.
The problem is that shadowfactor behaves as if the texture were uniformly black. I tried assigning texture(shadowMap,projCoords.xy).r directly to the fragment color to see if there are any differences anywhere, but it is all the same black, i.e. 0.
I also tried using the shadow map texture directly on the terrain to see if there is anything in it, but again I only get a black texture.
I am aware that this is a very long question, but I have been trying to debug it for the last two days and can't find the error. My guess is that I'm either not binding the texture right or that the wrong framebuffer is used in the render cycle.
Hopefully someone is willing to help and can find the error.
Thank you for your time in advance,
Alex

Texture appears white

I have a textured skydome. It renders white when an image is attached, but it renders correctly when a color is given instead. I have reason to assume the texture is being overwritten, so some tips on this would be great. It used to work fine, displaying the texture appropriately.
EDIT: If I draw the texture directly to the FBO, it does show up. However, when I map it onto the sphere, the sphere renders white. Give the sphere a color, and it shows up correctly with that color. Also, for the record, white is not the clear color, and the image I use is quite large (~3000x1000).
ADD: No errors are given anywhere.
Changing:
glActiveTextureARB(GL_TEXTURE6_ARB);
glCallList(SkySphere.getDisplayList());
To:
glActiveTextureARB(GL_TEXTURE0_ARB);
glCallList(SkySphere.getDisplayList());
displays the proper image once, on the first cycle, then white again.
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fboId);
glViewport(0,0,screenWidth,screenHeight);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(90.0f, ((float)screenWidth/(float)screenHeight),0.1f,100.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glShadeModel(GL_SMOOTH);
glHint(GL_PERSPECTIVE_CORRECTION_HINT,
GL_NICEST);
glDisable(GL_DEPTH_TEST);
glClearColor(1.0f,1.0f,0.0f,1.0f);
glClear (GL_COLOR_BUFFER_BIT);
glLoadIdentity ();
camera.look();
glEnable(GL_TEXTURE_2D);
glDisable(GL_LIGHTING);
glActiveTextureARB(GL_TEXTURE6_ARB);
glCallList(SkySphere.getDisplayList());
glDisable(GL_TEXTURE_2D);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
This is the skysphere code:
public static int loadTexture(String filename) {
ByteBuffer buf = null;
int tWidth = 0;
int tHeight = 0;
.. load png into buffer..
// Create a new texture object in memory and bind it
textureId = GL11.glGenTextures();
GL11.glBindTexture(GL11.GL_TEXTURE_2D, textureId);
// All RGB bytes are aligned to each other and each component is 1 byte
GL11.glPixelStorei(GL11.GL_UNPACK_ALIGNMENT, 1);
// Upload the texture data and generate mip maps (for scaling)
GL11.glTexImage2D(GL11.GL_TEXTURE_2D, 0, GL11.GL_RGB, tWidth, tHeight, 0,
GL11.GL_RGBA, GL11.GL_UNSIGNED_BYTE, buf);
// Setup what to do when the texture has to be scaled
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MAG_FILTER,
GL11.GL_NEAREST);
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MIN_FILTER,
GL11.GL_LINEAR);
return textureId;
}
public static int getDisplayList() {
return displayList;
}
public static int makeSphere() {
Sphere s = new Sphere(); // an LWJGL class for drawing sphere
s.setOrientation(GLU.GLU_INSIDE); // normals point inwards
s.setTextureFlag(true); // generate texture coords
displayList = GL11.glGenLists(1);
GL11.glNewList(displayList, GL11.GL_COMPILE);
{
GL11.glPushMatrix();
{
GL11.glBindTexture(GL11.GL_TEXTURE_2D, getTextureId());
//GL11.glTranslatef(0,0,0);
GL11.glRotatef(90f, 1,0,0); // rotate the sphere to align the axis vertically
s.draw(1, 48, 48); // run GL commands to draw sphere
}
GL11.glPopMatrix();
}
GL11.glEndList();
return displayList;
}
In initGL:
SkySphere.createShader();
SkySphere.loadTexture("textures/panorama2.png");
SkySphere.makeSphere();
Also I'm doing most of my work in framebuffers:
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, modelsFboId);
And on one occasion I copy the depth to a texture:
glActiveTextureARB(GL_TEXTURE3_ARB);
glBindTexture(GL_TEXTURE_2D, modelsDepthTextureId);
glCopyTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, 0, 0, screenWidth, screenHeight);
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
I used
glPushAttrib(GL_ALL_ATTRIB_BITS);
at the beginning and
glPopAttrib();
at the end to reset the OpenGL states each frame.

How to translate the camera in GLES2.0?

I want to create a camera moving above a tiled plane. The camera is supposed to move in the XY plane only and to look straight down all the time. With an orthographic projection, I expect a pseudo-2D renderer.
My problem is that I don't know how to translate the camera. After some research, it seems to me that there is no such thing as a "camera" in OpenGL and that I have to translate the whole world instead. Changing the eye position and view center coordinates in the Matrix.setLookAtM function just leads to distorted results.
Translating the whole MVP matrix does not work either.
I'm running out of ideas now; do I have to translate every single vertex every frame directly in the vertex buffer? That does not seem plausible to me.
I derived GLSurfaceView and implemented the following functions to set up and update the scene:
public void onSurfaceChanged(GL10 unused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
// Setup the projection Matrix for an orthogonal view
Matrix.orthoM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
}
public void onDrawFrame(GL10 unused) {
// Draw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
//Setup the camera
float[] camPos = { 0.0f, 0.0f, -3.0f }; //no matter what else I put in here the camera seems to point
float[] lookAt = { 0.0f, 0.0f, 0.0f }; // to the coordinate center and distorts the square
// Set the camera position (View matrix)
Matrix.setLookAtM( vMatrix, 0, camPos[0], camPos[1], camPos[2], lookAt[0], lookAt[1], lookAt[2], 0f, 1f, 0f);
// Calculate the projection and view transformation
Matrix.multiplyMM( mMVPMatrix, 0, projMatrix, 0, vMatrix, 0);
//rotate the viewport
Matrix.setRotateM(mRotationMatrix, 0, getRotationAngle(), 0, 0, -1.0f);
Matrix.multiplyMM(mMVPMatrix, 0, mRotationMatrix, 0, mMVPMatrix, 0);
//I also tried to translate the viewport here
// (and several other places), but I could not find any solution
//draw the plane (actually a simple square right now)
mPlane.draw(mMVPMatrix);
}
Changing the eye-position and view center coordinates in the "LookAt"-function just leads to distorted results.
If you got this from the Android tutorial, I think they have a bug in their code (I made a comment about it here).
Try the following fixes:
Use setLookAtM to position the camera where you want it to be.
In the shader, change the gl_Position line
from: " gl_Position = vPosition * uMVPMatrix;"
to: " gl_Position = uMVPMatrix * vPosition;"
I'd think the //rotate the viewport section should be removed as well, as this is not rotating the camera properly. You can change the camera's orientation in the setLookAtM call.

Java OpenGL screen sized texture mapped quad

I have a Java OpenGL (JOGL) app, and I'm trying to create a texture-mapped quad that covers the entire screen. I draw some pixels to a buffer, and then I want to read those pixels into a texture and redraw them on screen (with a fragment shader applied). My code for mapping the texture to the viewport is:
gl.glMatrixMode(GL.GL_PROJECTION);
gl.glPushMatrix();
gl.glLoadIdentity();
gl.glOrtho( 0, width, height, 0, -1, 1 );
gl.glMatrixMode(GL.GL_MODELVIEW);
gl.glPushMatrix();
gl.glLoadIdentity();
IntBuffer ib = IntBuffer.allocate(1);
gl.glEnable(GL.GL_TEXTURE_2D);
gl.glGenTextures(1, ib);
gl.glPixelStorei(GL.GL_PACK_ALIGNMENT, 1);
//buff contains pixels read from glReadPixels
gl.glTexImage2D(GL.GL_TEXTURE_2D, 0, GL.GL_RGBA, width, height, 0, GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, buff);
gl.glBindTexture(GL.GL_TEXTURE_2D, ib.get(0));
gl.glBegin(GL.GL_QUADS);
gl.glTexCoord2f(0,1);
gl.glVertex2f(0,0);
gl.glTexCoord2f(0,0);
gl.glVertex2f(0,height);
gl.glTexCoord2f(1,0);
gl.glVertex2f(width,height);
gl.glTexCoord2f(1,1);
gl.glVertex2f(width,0);
gl.glEnd();
gl.glBindTexture(GL.GL_TEXTURE_2D, 0);
gl.glPopMatrix();
gl.glPopMatrix();
The end result is a quad that does not cover the whole viewport (it's only partially on screen) and that does not contain the pixels from the buffer. What am I doing incorrectly here?
thanks,
Jeff
First, you should only create the texture in your initialization code. You should not be calling glTexImage2D every frame. Only call glTexImage2D again if the size of the texture changes; glTexSubImage2D can be used to upload data to an existing texture. Think of glTexImage2D as "new" and glTexSubImage2D as a memory copy.
Do this once, after initializing OpenGL.
IntBuffer ib = IntBuffer.allocate(1); //Store this in your object
gl.glGenTextures(1, ib);
gl.glPixelStorei(GL.GL_PACK_ALIGNMENT, 1);
//buff contains pixels read from glReadPixels
gl.glBindTexture(GL.GL_TEXTURE_2D, ib.get(0));
gl.glTexImage2D(GL.GL_TEXTURE_2D, 0, GL.GL_RGBA, width, height, 0, GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, null); // null allocates storage without uploading data
gl.glBindTexture(GL.GL_TEXTURE_2D, 0);
Then, each frame, do this:
gl.glMatrixMode(GL.GL_PROJECTION);
gl.glPushMatrix();
gl.glLoadIdentity();
gl.glMatrixMode(GL.GL_MODELVIEW);
gl.glPushMatrix();
gl.glLoadIdentity();
gl.glBindTexture(GL.GL_TEXTURE_2D, ib.get(0)); //Retrieved from your object
gl.glEnable(GL.GL_TEXTURE_2D);
gl.glTexSubImage2D(GL.GL_TEXTURE_2D, 0, 0, 0, width, height, GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, buff);
gl.glBegin(GL.GL_QUADS);
gl.glTexCoord2f(0,1);
gl.glVertex2f(-1, -1);
gl.glTexCoord2f(0, 0);
gl.glVertex2f(-1, 1);
gl.glTexCoord2f(1, 0);
gl.glVertex2f(1, 1);
gl.glTexCoord2f(1, 1);
gl.glVertex2f(1, -1);
gl.glEnd();
gl.glMatrixMode(GL.GL_MODELVIEW);
gl.glPopMatrix();
gl.glMatrixMode(GL.GL_PROJECTION);
gl.glPopMatrix();
gl.glMatrixMode(GL.GL_MODELVIEW);
By using identity for projection and modelview, we are able to supply vertex coordinates directly in clip space. The [-1, 1] range in clip space maps to [0, width] x [0, height] in window space. So we don't have to know or care about how big the window is; as long as glViewport was set up correctly, this should work.
It may not be the problem, but it won't be helping: You are popping the modelview matrix twice for a single push. You are not popping the projection matrix at all.
I would recommend setting the projection matrix once at startup, without doing any pushes or pops. You don't really need to push and pop the modelview matrix either. (You could do your texture setup once at startup, too.)
I would start with checking glError with code like the below. Note I used the GL2 object because there were some issues with older versions of JOGL and the GL object, silly things like GL_QUADS not being there.
If you have a shader enabled with the above code, you need to do the texturing by reading the sampler. If so, please attach the shader code you are using with this rendering code.
private static void checkForGLErrors(GL2 gl) {
int errno = gl.glGetError();
switch (errno) {
case GL2.GL_INVALID_ENUM:
System.err.println("OpenGL Error: Invalid ENUM");
break;
case GL2.GL_INVALID_VALUE:
System.err.println("OpenGL Error: Invalid Value");
break;
case GL2.GL_INVALID_OPERATION:
System.err.println("OpenGL Error: Invalid Operation");
break;
case GL2.GL_STACK_OVERFLOW:
System.err.println("OpenGL Error: Stack Overflow");
break;
case GL2.GL_STACK_UNDERFLOW:
System.err.println("OpenGL Error: Stack Underflow");
break;
case GL2.GL_OUT_OF_MEMORY:
System.err.println("OpenGL Error: Out of Memory");
break;
default:
return;
}
}
I would also try to avoid generating the texture every frame if it is something that doesn't change. You can save the textureId and bind it later.

FrameBuffer Objects for Dynamic Cubemaps in GLES 2.0 on Android

Update: Turns out this is a driver issue with the PowerVR SGX in my Nexus S. The code works fluidly on all other devices I've tested it on.
I'll be making a smaller test case and submitting a bug report... to someone. Don't know who.
Hey guys,
First off, I'm working on a port of AndAR (ARToolkit for Android) to GLES 2.0 using the Java GLES 2.0 bindings. The entirety of my code can be found here if you're curious (AndARShaders), but I'll try to sum up the problem in this question.
I'm attempting to implement this paper to generate AR renderings which reflect and refract believably: Virtual Reflections in Augmented Reality Environments. In order to do this, the screen space bounding box of the object to be rendered is determined, then used to generate texture coordinates for planes representing each face of the cubemap. This means rendering a cubemap for each frame for each model. I'm only rendering one model at a time right now. I'm trying to use framebuffer objects to render the cubemap based on the method described in the paper.
Anyway, to the problem.
I have it mostly implemented. As far as I can tell, at least the front face of the cubemap has its vertices and UV coordinates generated correctly. I can render my front face to the system-provided framebuffer for the screen and it renders just fine, without problems, for as long as I'd like. The problem comes in when rendering it to a framebuffer object.
When I render my cubemap faces to a framebuffer object linked to a cubemap texture, GL eats ALL of my memory within a few seconds and crashes with GL error 1285 (GL_OUT_OF_MEMORY). If I don't bind the FBO, I can render the cubemap faces to the screen without any memory issues. The cubemap texture size is 128 px square, which should be reasonable for a mobile device. Somehow, GL is leaking memory.
Here's the rough order in which I'm doing things. This is the entry point of the render for this frame. (src/edu/dhbw/andar/ARGLES20Renderer.java ~Line 179)
// BEGIN TO DRAW FRAME. DRAW BACKGROUND CAMERA IMAGE TO QUAD
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0); // Camera image is stored in Texture0
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureName);
//load new preview frame as a texture, if needed
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, previewFrameWidth, previewFrameHeight, mode, GLES20.GL_UNSIGNED_BYTE, frameData);
//draw camera preview frame:
squareBuffer.position(0);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, GraphicsUtil.TRIANGLE_VERTICES_DATA_STRIDE_BYTES, squareBuffer);
GLES20.glEnableVertexAttribArray(maPositionHandle);
textureBuffer.position(0);
GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, GraphicsUtil.TRIANGLE_VERTICES_UV_STRIDE_BYTES, textureBuffer);
GLES20.glEnableVertexAttribArray(maTextureHandle);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniform1i(mSamplerLoc, 0);
//draw camera square
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glDisableVertexAttribArray(maPositionHandle);
GLES20.glDisableVertexAttribArray(maTextureHandle);
DRAW_OBJECTS();
And the code to draw the objects goes somewhat like this:
( src/edu/dhbw/andar/ARGLES20Object.java ~ Line 36 )
( src/edu/dhbw/andar/pub/CustomGLES20Object.java ~Line 55 )
// Use the new program for the object (Refract/reflect shader)
GLES20.glUseProgram( mProgram );
// Transform to where the marker is
Matrix.multiplyMM(mMVPMatrix, 0, glCameraMatrix, 0, glMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Create a cubemap for this object from vertices
GENERATE_CUBEMAP( box.vertArray() );
// Feed in Verts
box.verts().position(0);
box.normals().position(0);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, VERTEX_NORMAL_DATA_STRIDE, box.verts());
GLES20.glEnableVertexAttribArray(maPositionHandle);
GLES20.glVertexAttribPointer(maNormalHandle, 3, GLES20.GL_FLOAT, false, VERTEX_NORMAL_DATA_STRIDE, box.normals());
GLES20.glEnableVertexAttribArray(maNormalHandle);
// Set Uniforms...
GLES20.glUniform4f(muColor, 0.0f, 1.0f, 0.0f, 1.0f);
...
// Draw the cube faces
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
...
GLES20.glDisableVertexAttribArray(maPositionHandle);
GLES20.glDisableVertexAttribArray(maNormalHandle);
Note the GENERATE_CUBEMAP( Vertices ) toward the beginning of rendering the object. Here's what that does; the screen space bounding box [ssbb] has already been calculated from the vertices.
(src/edu/dhbw/andar/ARGLES20Renderer.java ~Line 280)
// Grab the current viewport and program for restoration later
int[] OldViewport = new int[4], OldProgram = new int[1];
GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, OldViewport, 0);
GLES20.glGetIntegerv(GLES20.GL_CURRENT_PROGRAM, OldProgram, 0);
// Update dynamic cubemap based on screen space bounding box for this frame
mDC.UpdateUVs( DynamicCubemap.CorrectSSBB( ssbb ), widthcorrection, heightcorrection );
// Set up the program used to render to the texture
GLES20.glUseProgram(mProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureName);
float[] projmatrix = new float[16]; // Projection Matrix
Matrix.orthoM(projmatrix, 0, -1.0f, 1.0f, -1.0f, 1.0f, -1.0f, 1.0f);
Matrix.multiplyMM(mMVPMatrix, 0, projmatrix, 0, mVMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniform1i(mSamplerLoc, 0); // Use the camera texture (bound in unit zero)
// Render to the front face of the cubemap
// Note: If I don't bind the new Framebuffer, this
// renders the face to the screen very nicely without memory issues
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_CUBE_MAP, 0); // Ensure we aren't rendering to the same texture we're using
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffers[5]);
GLES20.glViewport( 0, 0, edu.dhbw.andar.Config.CUBEMAP_SIZE, edu.dhbw.andar.Config.CUBEMAP_SIZE);
GLES20.glClear( GLES20.GL_COLOR_BUFFER_BIT );
mDC.DrawFace( 5, maPositionHandle, maTextureHandle ); // Draw the front face with glDrawArrays
// Unbind the framebuffer, we no longer need to render to textures.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
// Ensure the newly generated cubemap is bound to the correct texture unit
GLES20.glBindTexture(GLES20.GL_TEXTURE_CUBE_MAP, mCubeMapTexture);
// Bind the old program and viewport
GLES20.glUseProgram( OldProgram[0] );
GLES20.glViewport( OldViewport[0], OldViewport[1], OldViewport[2], OldViewport[3] );
And that's it. Here's how I initialize my FBOs and cubemap textures when the program starts.
(src/edu/dhbw/andar/ARGLES20Renderer.java ~Line 128)
// Generate Cubemap Textures
int[] cubemaptextures = new int[1];
GLES20.glGenTextures(1, cubemaptextures, 0 );
mCubeMapTexture = cubemaptextures[0];
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_CUBE_MAP, mCubeMapTexture);
for( int i = 0; i < 6; i++ ) {
GLES20.glTexImage2D(GLES20.GL_TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, mode, CUBEMAP_SIZE, CUBEMAP_SIZE, 0, mode, GLES20.GL_UNSIGNED_BYTE, ByteBuffer.wrap(frame));
}
GLES20.glTexParameterf(GLES20.GL_TEXTURE_CUBE_MAP, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_CUBE_MAP, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_CUBE_MAP, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_CUBE_MAP, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
GLES20.glBindTexture(GLES20.GL_TEXTURE_CUBE_MAP, 0);
// Create a set of FrameBuffers for the cubemap
mFrameBuffers = new int[6];
GLES20.glGenFramebuffers(6, mFrameBuffers, 0);
for( int i = 0; i < 6; i++ ) {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffers[i]);
GLES20.glFramebufferTexture2D( GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_CUBE_MAP_POSITIVE_X + i, mCubeMapTexture, 0 );
GLES20.glCheckFramebufferStatus( GLES20.GL_FRAMEBUFFER );
}
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
Perhaps my ordering is wrong, or my setup is incorrect?
Sorry for the LONG post. I really did everything in my power to make this as short as possible while still giving enough information to solve the problem. I cut out a lot of extra code which is application specific. If you're interested, or you think the problem might be caused elsewhere, I included links to the actual source files so you can take a quick peek.
Thanks for your time! I've wasted FAR too much time on this.
-Griff
Edit: clarified texture size
