JOGL shadow mapping - Java

I am translating the OpenGL SuperBible demos into Java using JOGL. All the demos up to chapter 12 went well, but now I am stuck on shadow mapping. Here is my setup:
public class ShadowMap implements GLEventListener
{
public static void main(String[] args)
{
ShadowMap rend = new ShadowMap();
window.NEWTWindowStarter.init(rend, null, null);
}
private int light_program, camera_program;
private int[] obj, bkg;
private int camera_mv_pointer, camera_proj_pointer, light_mvp_pointer, camera_shadow_pointer;
private int fbo, depth_tex;
private int screen_width, screen_height;
private float aspect;
private double[] light_position, camera_position;
private double[][] light_proj_matrix, light_view_matrix;
private double[][] camera_proj_matrix, camera_view_matrix;
private double[][] bias_matrix, shadow_matrix;
private double[][] obj_model_matrix, bkg_model_matrix;
@Override
public void init(GLAutoDrawable glAutoDrawable)
{
GL4 gl = glAutoDrawable.getGL().getGL4bc();
ObjectParser parser = new ObjectParser("pawn_s.obj");
obj = BufferController.prepareVAO(gl, parser.getDataHolder());
parser = new ObjectParser("bkg.obj");
bkg = BufferController.prepareVAO(gl, parser.getDataHolder());
light_program = ShaderController.init(gl, "shaders/demo/d25/light_v.glsl", "shaders/demo/d25/light_f.glsl");
light_mvp_pointer = gl.glGetUniformLocation(light_program, "mvp");
camera_program = ShaderController.init(gl, "shaders/demo/d25/camera_v.glsl", "shaders/demo/d25/camera_f.glsl");
camera_mv_pointer = gl.glGetUniformLocation(camera_program, "mv_matrix");
camera_proj_pointer = gl.glGetUniformLocation(camera_program, "proj_matrix");
camera_shadow_pointer = gl.glGetUniformLocation(camera_program, "shadow_matrix");
depth_tex = BufferController.generateTextureId(gl);
gl.glBindTexture(gl.GL_TEXTURE_2D, depth_tex);
gl.glTexStorage2D(gl.GL_TEXTURE_2D, 11, gl.GL_DEPTH_COMPONENT32F, 1024, 1024);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_LINEAR);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP_TO_BORDER);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP_TO_BORDER);
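// compare mode turns this into a "shadow" texture: sampler2DShadow lookups return the
// result of a depth comparison (a visibility factor) instead of the raw depth value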
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_COMPARE_MODE, gl.GL_COMPARE_REF_TO_TEXTURE);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_COMPARE_FUNC, gl.GL_LEQUAL);
gl.glActiveTexture(gl.GL_TEXTURE0);
gl.glBindTexture(gl.GL_TEXTURE_2D, depth_tex);
fbo = BufferController.generateFrameId(gl);
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, fbo);
gl.glFramebufferTexture(gl.GL_FRAMEBUFFER, gl.GL_DEPTH_ATTACHMENT, depth_tex, 0);
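// depth-only FBO: there is no color attachment, so disable color draws and reads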
gl.glDrawBuffer(gl.GL_NONE);
gl.glReadBuffer(gl.GL_NONE);
if(gl.glCheckFramebufferStatus(gl.GL_FRAMEBUFFER) == gl.GL_FRAMEBUFFER_COMPLETE)
System.out.println("FrameBuffer OK!");
else
System.out.println("FrameBuffer FAIL!");
gl.glBindTexture(gl.GL_TEXTURE_2D, 0);
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0);
gl.glEnable(gl.GL_DEPTH_TEST);
}
@Override
public void dispose(GLAutoDrawable glAutoDrawable){}
@Override
public void display(GLAutoDrawable glAutoDrawable)
{
GL4bc gl = glAutoDrawable.getGL().getGL4bc();
computeMatrices();
gl.glEnable(gl.GL_DEPTH_TEST);
// depth pass
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, fbo);
gl.glViewport(0, 0, 1024, 1024);
gl.glUseProgram(light_program);
drawFromLight(gl, obj, obj_model_matrix);
drawFromLight(gl, bkg, bkg_model_matrix);
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0);
// final pass
gl.glViewport(0, 0, screen_width, screen_height);
gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT);
gl.glBindTexture(gl.GL_TEXTURE_2D, depth_tex);
gl.glDrawBuffer(gl.GL_BACK);
gl.glUseProgram(camera_program);
drawFromCamera(gl, obj, obj_model_matrix);
drawFromCamera(gl, bkg, bkg_model_matrix);
gl.glBindTexture(gl.GL_TEXTURE_2D, 0);
}
private void computeMatrices()
{
bkg_model_matrix = Matrix.model(0, 0, 0, 0, 0, 0, 1);
obj_model_matrix = Matrix.model(0, 0, 0, 0, 0, 0, 1);
light_position = new double[]{10, 10, 10};
camera_position = new double[]{10, 0, 10};
light_proj_matrix = Matrix.frustum(-1.0, 1.0, -1.0, 1.0, 1.0, 200.0);
light_view_matrix = Matrix.lookAt(light_position, new double[]{0, 0, 0}, new double[]{0, 1, 0});
camera_proj_matrix = Matrix.perspective(60, aspect, 0.1f, 1000f);
camera_view_matrix = Matrix.lookAt(camera_position, new double[]{0, 0, 0}, new double[]{0, 1, 0});
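// bias matrix remaps clip-space coordinates from [-1, 1] to [0, 1] for shadow-map lookups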
bias_matrix = new double[][]{
{0.5, 0.0, 0.0, 0.0},
{0.0, 0.5, 0.0, 0.0},
{0.0, 0.0, 0.5, 0.0},
{0.5, 0.5, 0.5, 1.0}};
shadow_matrix = Matrix.mult(bias_matrix, light_proj_matrix, light_view_matrix);
}
private void drawFromLight(GL4bc gl, int[] obj, double[][] model_matrix)
{
gl.glUniformMatrix4fv(light_mvp_pointer, 1, false, Matrix.toArrayF(Matrix.mult(light_proj_matrix, light_view_matrix, model_matrix)), 0);
BufferControllerDep.renderTrianglesVAO(gl, obj[0], obj[1], obj[2]);
}
private void drawFromCamera(GL4bc gl, int[] obj, double[][] model_matrix)
{
gl.glUniformMatrix4fv(camera_shadow_pointer, 1, false, Matrix.toArrayF(Matrix.mult(shadow_matrix, model_matrix)), 0);
gl.glUniformMatrix4fv(camera_proj_pointer, 1, false, Matrix.toArrayF(camera_proj_matrix), 0);
gl.glUniformMatrix4fv(camera_mv_pointer, 1, false, Matrix.toArrayF(Matrix.mult(camera_view_matrix, model_matrix)), 0);
BufferControllerDep.renderTrianglesVAO(gl, obj[0], obj[1], obj[2]);
}
@Override
public void reshape(GLAutoDrawable glAutoDrawable, int x, int y, int width, int height)
{
screen_width = width;
screen_height = height;
aspect = (1.0f * screen_width) / screen_height;
}
}
The light-pass shaders are very basic. The camera vertex shader is as follows:
#version 430 core
uniform mat4 mv_matrix;
uniform mat4 proj_matrix;
uniform mat4 shadow_matrix;
layout (location = 0) in vec4 position;
layout (location = 1) in vec3 normal;
out vec4 shadow_coord;
uniform vec3 light_pos = vec3(10.0, 10.0, 10.0);
out vec3 N;
out vec3 L;
void main(void)
{
N = normalize(mat3(mv_matrix) * normal);
L = normalize(light_pos - (mv_matrix * position).xyz);
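// shadow_matrix is uploaded as bias * light_proj * light_view (* model) on the Java side,
// so this puts the vertex into the light's [0, 1] shadow-map space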
shadow_coord = shadow_matrix * position;
gl_Position = proj_matrix * mv_matrix * position;
}
And the camera fragment shader:
#version 430 core
layout (location = 0) out vec4 color;
layout (binding = 0) uniform sampler2DShadow shadow_tex;
in vec4 shadow_coord;
uniform vec3 D = vec3(0.9, 0.8, 1.0);
in vec3 N;
in vec3 L;
void main(void)
{
vec3 diffuse = dot(N, L) * D;
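// textureProj divides shadow_coord by its w component, and the shadow sampler then
// compares the resulting depth against the stored depth, returning a value in [0, 1]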
float shadow = textureProj(shadow_tex, shadow_coord);
// color = vec4(diffuse, 1.0);
color = shadow * vec4(1);
}
Just rendering the diffuse term works fine; the projection, lookAt, and transformation matrices all seem to work.
However, when trying to render shadows, it fails (screenshot omitted). Moving the light closer to the object changes the result, but it is still wrong (screenshot omitted).
I use my own Matrix and Vector classes and can provide the source code if needed.
Any insight is appreciated. Thank you!

The problem was caused by reversed matrix multiplication order in the Java code. Here is the corrected code:
private void drawFromLight(GL4bc gl, int[] obj, double[][] model_matrix)
{
gl.glUniformMatrix4fv(light_mvp_pointer, 1, false,
Matrix.toArrayF(Matrix.mult(model_matrix, light_view_matrix, light_proj_matrix)), 0);
BufferControllerDep.renderTrianglesVAO(gl, obj[0], obj[1], obj[2]);
}
private void drawFromCamera(GL4bc gl, int[] obj, double[][] model_matrix)
{
gl.glUniformMatrix4fv(camera_shadow_pointer, 1, false,
Matrix.toArrayF(Matrix.mult(model_matrix, shadow_matrix)), 0);
gl.glUniformMatrix4fv(camera_mv_pointer, 1, false,
Matrix.toArrayF(Matrix.mult(model_matrix, camera_view_matrix)), 0);
gl.glUniformMatrix4fv(camera_mvp_pointer, 1, false,
Matrix.toArrayF(Matrix.mult(model_matrix, camera_view_matrix, camera_proj_matrix)), 0);
BufferControllerDep.renderTrianglesVAO(gl, obj[0], obj[1], obj[2]);
}
Using the same shaders, this produces the following result (screenshot omitted).
Also adding
gl.glEnable(gl.GL_POLYGON_OFFSET_FILL);
gl.glPolygonOffset(2.0f, 0.0f);
before performing the light pass gets rid of the distortion (screenshot omitted).
Mixing the shadows with the diffuse term produces the final result (screenshot omitted).
There is still room for improvement, however...
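For reference, here is a sketch of where those polygon-offset calls sit in the depth pass of display() above (variable names follow the code from the question; clearing the depth attachment each frame is an addition the original listing does not show, but it is generally a good idea):
// depth pass with polygon offset, pushing stored depths slightly away from the light
// to reduce self-shadowing artifacts ("shadow acne")
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, fbo);
gl.glViewport(0, 0, 1024, 1024);
gl.glClear(gl.GL_DEPTH_BUFFER_BIT); // assumed: clear the shadow map every frame
gl.glEnable(gl.GL_POLYGON_OFFSET_FILL);
gl.glPolygonOffset(2.0f, 0.0f);
gl.glUseProgram(light_program);
drawFromLight(gl, obj, obj_model_matrix);
drawFromLight(gl, bkg, bkg_model_matrix);
gl.glDisable(gl.GL_POLYGON_OFFSET_FILL); // the offset should only affect the depth pass
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0);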

Related

Trying to draw a model with Gouraud shading in JOGL for desktop, but it is flat

I already have a drawn model, but it has flat shading (from what I understand, it should be smooth by default...)
This is the initial config:
private void SetLightningAndMaterials(){
//float[] lightPos = {1, 1, 1, 0};
float[] lightPos = {0, 0, 1, 0};
float[] lightColorDiffuse = {1, 1, 1, 1};
float[] lightColorAmbient = {0.2f, 0.2f, 0.2f, 1};
gl.glShadeModel(GL.GL_SMOOTH);
gl.glLightfv(GL.GL_LIGHT1, GL.GL_POSITION, lightPos, 0);
gl.glLightfv(GL.GL_LIGHT1, GL.GL_DIFFUSE, lightColorDiffuse, 0);
gl.glLightfv(GL.GL_LIGHT1, GL.GL_AMBIENT, lightColorAmbient, 0);
gl.glEnable(GL.GL_LIGHT1);
gl.glEnable(GL.GL_LIGHTING);
gl.glMaterialfv(GL.GL_FRONT, GL.GL_AMBIENT, ambientColour, 0);
gl.glMaterialfv(GL.GL_FRONT, GL.GL_DIFFUSE, mesh.colour, 0);
gl.glEnable(GL.GL_LIGHTING);
gl.glEnable(GL.GL_LIGHT0);
float[] noAmbient =
{ 0.1f, 0.1f, 0.1f, 1f }; // low ambient light
float[] spec =
{ 1f, 0.6f, 0f, 1f }; // specular colour
float[] diffuse =
{ 0.5f, 0.5f, 0.5f, 1f };
gl.glLightfv(GL.GL_LIGHT0, GL.GL_AMBIENT, noAmbient, 0);
gl.glLightfv(GL.GL_LIGHT0, GL.GL_SPECULAR, spec, 0);
gl.glLightfv(GL.GL_LIGHT0, GL.GL_DIFFUSE, diffuse, 0);
gl.glLightfv(GL.GL_LIGHT0, GL.GL_POSITION, new float[]{0,0,10,1}, 0);
}
And this is how I draw the model:
public void Draw(GL gl, GLU glu){
Vec3d normal;
MassPoint vertex1, vertex2, vertex3;
int faceIndex=0;
Face surfaceFace;
for (faceIndex=0; faceIndex<surfaceFaces.size();faceIndex++){
surfaceFace = surfaceFaces.get(faceIndex);
surfaceFace.recalculateNormal();
vertex1 = surfaceFace.vertex1;
vertex2 = surfaceFace.vertex2;
vertex3 = surfaceFace.vertex3;
normal = surfaceFace.normal;
gl.glBegin(gl.GL_TRIANGLES);
gl.glNormal3d(normal.x, normal.y, normal.z);
gl.glMaterialfv(GL.GL_FRONT, GL.GL_DIFFUSE, colour, 0);
gl.glVertex3d(vertex1.position.x, vertex1.position.y, vertex1.position.z);
gl.glVertex3d(vertex2.position.x, vertex2.position.y, vertex2.position.z);
gl.glVertex3d(vertex3.position.x, vertex3.position.y, vertex3.position.z);
gl.glEnd();
}
}
I want to believe there's an easy way of solving this without having to create a shader (I have no idea how to set those up in Java).
I'm using JOGL 1 by the way, and it is probably an old version (the imports are like javax.media.opengl.*).
I managed to solve the problem. For smoothness to work, the drawing expects 3 normals (one per vertex), but I was only passing 1 normal (one per face).
Here's the new code for the drawing:
public void Draw(GL gl, GLU glu) {
Vec3d[] normalsPerVertex = new Vec3d[3];
MassPoint vertex1, vertex2, vertex3;
int faceIndex=0;
Face surfaceFace;
for (faceIndex=0; faceIndex<surfaceFaces.size();faceIndex++){
surfaceFace = surfaceFaces.get(faceIndex);
vertex1=surfaceFace.vertex1;
normalsPerVertex[0] = vertex1.CalcNormal();
vertex2=surfaceFace.vertex2;
normalsPerVertex[1] = vertex2.CalcNormal();
vertex3=surfaceFace.vertex3;
normalsPerVertex[2] = vertex3.CalcNormal();
gl.glBegin(GL.GL_TRIANGLES);
gl.glNormal3d(normalsPerVertex[0].x, normalsPerVertex[0].y, normalsPerVertex[0].z);
gl.glVertex3d(vertex1.position.x, vertex1.position.y, vertex1.position.z);
gl.glNormal3d(normalsPerVertex[1].x, normalsPerVertex[1].y, normalsPerVertex[1].z);
gl.glVertex3d(vertex2.position.x, vertex2.position.y, vertex2.position.z);
gl.glNormal3d(normalsPerVertex[2].x, normalsPerVertex[2].y, normalsPerVertex[2].z);
gl.glVertex3d(vertex3.position.x, vertex3.position.y, vertex3.position.z);
gl.glEnd();
}
}
The calculated normal for each vertex is the average of the normals of all the faces connected to that vertex. Here's the code for that:
public Vec3d CalcNormal() {
Vec3d normalMedia = new Vec3d();
for (Face face : facesRelated) {
face.recalculateNormal();
normalMedia.add(face.normal);
}
normalMedia.mul(1d/facesRelated.size());
return normalMedia;
}
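One possible refinement (a sketch, assuming Vec3d exposes public x, y, z fields, as the Draw method suggests): renormalize the averaged normal so it is unit length no matter how many faces share the vertex, which keeps the diffuse lighting consistent.
public Vec3d CalcNormal() {
    Vec3d sum = new Vec3d();
    for (Face face : facesRelated) {
        face.recalculateNormal();
        sum.add(face.normal);
    }
    // average, then renormalize; with GL_NORMALIZE enabled OpenGL would also do this
    sum.mul(1d / facesRelated.size());
    double len = Math.sqrt(sum.x * sum.x + sum.y * sum.y + sum.z * sum.z);
    if (len > 1e-9) {
        sum.mul(1d / len);
    }
    return sum;
}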
Hope this helps someone else.

JOGL display using Buffer

I have 3 FloatBuffers - vertices, normals, and colors - which contain what their names suggest. I also have an IntBuffer to keep track of the indexing.
The data looks correct, but I'm having trouble displaying it: I just see a blank canvas and I'm not sure what I am doing wrong. I'm guessing something is being overlooked in init(), display(), reshape() or dispose(). Can anyone tell me if they see something glaringly wrong in the code below, and why nothing is being displayed?
@Override
public void init(GLAutoDrawable drawable) {
GL2 gl = drawable.getGL().getGL2();
gl.glClearColor(.0f, .0f, .2f, 0.9f);
gl.glEnable(GL2.GL_DEPTH_TEST);
gl.glDepthFunc(GL2.GL_LESS);
gl.glEnable(GL2.GL_CULL_FACE);
gl.glEnable(GL2.GL_LIGHTING);
gl.glEnable(GL2.GL_LIGHT0);
gl.glEnable(GL2.GL_AUTO_NORMAL);
gl.glEnable(GL2.GL_NORMALIZE);
gl.glFrontFace(GL2.GL_CCW);
gl.glCullFace(GL2.GL_BACK);
gl.glHint(GL2.GL_PERSPECTIVE_CORRECTION_HINT, GL2.GL_NICEST);
gl.glShadeModel(GL2.GL_SMOOTH);
if (viewMesh) {
gl.glPolygonMode(GL2.GL_FRONT_AND_BACK, GL2.GL_LINE);
} else {
gl.glPolygonMode(GL2.GL_FRONT_AND_BACK, GL2.GL_FILL);
}
glu = new GLU();
// Build the VBOs
VBO = IntBuffer.allocate(4);
gl.glGenBuffers(4, VBO);
vertVBOID = VBO.get(0);
normalVBOID = VBO.get(1);
colorVBOID = VBO.get(2);
indexVBOID = VBO.get(3);
// vertices
int vsize = sd.verts.capacity() * BufferUtil.SIZEOF_FLOAT;
gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, vertVBOID); // get a valid name
gl.glBufferData(GL2.GL_ARRAY_BUFFER, vsize, sd.verts, GL2.GL_STATIC_DRAW);
gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, 0); // reset
// normals
int nsize = sd.normals.capacity() * BufferUtil.SIZEOF_FLOAT;
gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, normalVBOID);
gl.glBufferData(GL2.GL_ARRAY_BUFFER, nsize, sd.normals, GL2.GL_STATIC_DRAW);
gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, 0);
// colors
int csize = sd.colors.capacity() * BufferUtil.SIZEOF_FLOAT;
gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, colorVBOID);
gl.glBufferData(GL2.GL_ARRAY_BUFFER, csize, sd.colors, GL2.GL_STATIC_DRAW);
gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, 0);
int isize = sd.indices.capacity() * BufferUtil.SIZEOF_INT;
gl.glBindBuffer(GL2.GL_ELEMENT_ARRAY_BUFFER, indexVBOID);
gl.glBufferData(GL2.GL_ELEMENT_ARRAY_BUFFER, isize, sd.indices, GL2.GL_STATIC_DRAW);
gl.glBindBuffer(GL2.GL_ELEMENT_ARRAY_BUFFER, 0);
// sd.verts = null; // copy of data is no longer necessary, it is in the graphics card now.
// sd.colors = null;
// sd.normals = null;
// sd.indices = null;
}
@Override
public void display(GLAutoDrawable drawable) {
GL2 gl = drawable.getGL().getGL2();
gl.glMatrixMode(GL2.GL_MODELVIEW);
gl.glLoadIdentity();
glu.gluLookAt(45, 0, 0, 0, 0, 0, 0.0, 1.0, 0.0);
gl.glScalef(scale, scale, scale);
gl.glRotatef(rot, 0, 1, 0);
gl.glTranslatef(-sd.tr_x, -sd.tr_y, -sd.tr_z);
gl.glEnableClientState(GLPointerFunc.GL_VERTEX_ARRAY);
gl.glEnableClientState(GLPointerFunc.GL_NORMAL_ARRAY);
gl.glEnableClientState(GLPointerFunc.GL_COLOR_ARRAY);
gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, normalVBOID);
gl.glNormalPointer(GL2.GL_FLOAT, 0, 0);
gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, colorVBOID);
gl.glColorPointer(3, GL2.GL_FLOAT, 0, 0);
gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, vertVBOID);
gl.glVertexPointer(3, GL2.GL_FLOAT, 0, 0);
gl.glBindBuffer(GL2.GL_ELEMENT_ARRAY_BUFFER, indexVBOID);
gl.glDrawElements(GL2.GL_TRIANGLES, sd.indices.capacity(), GL2.GL_INT, 0);
gl.glBindBuffer(GL2.GL_ELEMENT_ARRAY_BUFFER, 0); // unbind it
gl.glDisableClientState(GLPointerFunc.GL_VERTEX_ARRAY);
gl.glDisableClientState(GLPointerFunc.GL_NORMAL_ARRAY);
gl.glDisableClientState(GLPointerFunc.GL_COLOR_ARRAY);
}
@Override
public void reshape(GLAutoDrawable drawable, int x, int y, int w, int h) {
GL2 gl = drawable.getGL().getGL2();
double fov = (Math.PI / 4.0);
double zmm = Math.abs(sd.min_z - sd.max_z);
camdist = (zmm / 2.0) / Math.tan(fov / 2.0);
h = (h == 0) ? 1 : h;
gl.glViewport(0, 0, w, h);
gl.glMatrixMode(GL2.GL_PROJECTION);
gl.glLoadIdentity();
glu.gluPerspective(camdist, w / (float) h, 0.1f, 1000.0);
}
/**
* Initialize graphics
*/
public void initOGL() {
profile = GLProfile.get(GLProfile.GL2);
caps = new GLCapabilities(profile);
caps.setHardwareAccelerated(true);
canvas = new GLCanvas(caps);
canvas.addGLEventListener(this);
canvas.requestFocusInWindow();
getContentPane().add(canvas); // add to the frame
}
@Override
public void dispose(GLAutoDrawable drawable) {
}
One clear problem is in the draw call:
gl.glDrawElements(GL2.GL_TRIANGLES, sd.indices.capacity(), GL2.GL_INT, 0);
GL_INT is not valid for the type argument of glDrawElements(). The only valid values are GL_UNSIGNED_BYTE, GL_UNSIGNED_SHORT and GL_UNSIGNED_INT. So the call needs to be:
gl.glDrawElements(GL2.GL_TRIANGLES, sd.indices.capacity(), GL2.GL_UNSIGNED_INT, 0);
Anytime you have a problem with OpenGL code not working as expected, make sure that you call glGetError(). With the call from your code, you should immediately get a GL_INVALID_ENUM error.
Also, your perspective setup code looks somewhat suspicious. I don't fully understand what you're trying to do there, but even just the naming suggests a possible misunderstanding:
glu.gluPerspective(camdist, w / (float) h, 0.1f, 1000.0);
The first argument of gluPerspective() is the field-of-view angle (in degrees), not a distance.
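As a concrete illustration of both points, a development-time error check and a corrected perspective call could look like this (the 45-degree field of view is just an example value; camdist can still be used to position the camera, it just should not be passed as the fov argument):
// in reshape(): pass an actual field-of-view angle in degrees, not a distance
glu.gluPerspective(45.0, w / (float) h, 0.1, 1000.0);
// during development, drain the OpenGL error queue after suspicious calls
int err;
while ((err = gl.glGetError()) != GL2.GL_NO_ERROR) {
    System.err.println("GL error: 0x" + Integer.toHexString(err));
}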

Andengine GLES2 - Proper way to load one BitmapTextureAtlas for several TextureRegion

I'm new to AndEngine, and somewhere I found that if you want to use one BitmapTextureAtlas for several ButtonSprites/Sprites you have to do it like this:
BitmapTextureAtlas texAtlas = new BitmapTextureAtlas(activity.getTextureManager(), 316, 1062, TextureOptions.NEAREST_PREMULTIPLYALPHA);
for (int i = 0; i < 10; i++) {
TextureRegion pistolActive = BitmapTextureAtlasTextureRegionFactory.createFromAsset(texAtlas, activity.getApplicationContext(), POWERUP_TEXTURE_PATH, 0, 0);
pistolActive.set(0, 0, TILE_WIDTH, TILE_HEIGHT);
TextureRegion pistolUnactive = BitmapTextureAtlasTextureRegionFactory.createFromAsset(texAtlas, activity.getApplicationContext(), POWERUP_TEXTURE_PATH, 0, 0);
pistolUnactive.set(0, TILE_HEIGHT, TILE_WIDTH, TILE_HEIGHT);
TextureRegion pistolDeactive = BitmapTextureAtlasTextureRegionFactory.createFromAsset(texAtlas, activity.getApplicationContext(), POWERUP_TEXTURE_PATH, 0, 0);
pistolDeactive.set(0, TILE_HEIGHT * 2, TILE_WIDTH, TILE_HEIGHT);
texAtlas.load();
ButtonSprite button = new ButtonSprite(0, 0, pistolActive, pistolUnactive, pistolDeactive, activity.getVertexBufferObjectManager(), new ButtonSprite.OnClickListener() {
@Override
public void onClick(final ButtonSprite buttonSprite, float v, float v2) {
}
});
}
But now I have checked, and if you put texAtlas.load() before the for loop it works too. So what's the best practice?
BitmapTextureAtlas texAtlas = new BitmapTextureAtlas(activity.getTextureManager(), 316, 1062, TextureOptions.NEAREST_PREMULTIPLYALPHA);
TextureRegion pistolActive = BitmapTextureAtlasTextureRegionFactory.createFromAsset(texAtlas, activity.getApplicationContext(), POWERUP_TEXTURE_PATH, 0, 0);
pistolActive.set(0, 0, TILE_WIDTH, TILE_HEIGHT);
TextureRegion pistolUnactive = BitmapTextureAtlasTextureRegionFactory.createFromAsset(texAtlas, activity.getApplicationContext(), POWERUP_TEXTURE_PATH, 0, 0);
pistolUnactive.set(0, TILE_HEIGHT, TILE_WIDTH, TILE_HEIGHT);
TextureRegion pistolDeactive = BitmapTextureAtlasTextureRegionFactory.createFromAsset(texAtlas, activity.getApplicationContext(), POWERUP_TEXTURE_PATH, 0, 0);
pistolDeactive.set(0, TILE_HEIGHT * 2, TILE_WIDTH, TILE_HEIGHT);
texAtlas.load();
for (int i = 0; i < 10; i++) {
ButtonSprite button = new ButtonSprite(0, 0, pistolActive, pistolUnactive, pistolDeactive, activity.getVertexBufferObjectManager(), new ButtonSprite.OnClickListener() {
@Override
public void onClick(final ButtonSprite buttonSprite, float v, float v2) {
}
});
}
There is no need to create the texture regions multiple times, so use the second approach.
Actually, if you load them the first way, you will get some UI problems.
So my suggestion is to use different textures for the different sprites you load.
If you want to avoid loading problems, you can go for sprite sheets (e.g. TexturePacker).

LibGDX 0.9.9 - Apply cubemap in environment

I am using LibGDX 0.9.9 and trying to render a cubemap and fog. My code snippet is below:
public void show() {
modelBatch = new ModelBatch();
environment = new Environment();
environment.set(new ColorAttribute(ColorAttribute.AmbientLight, 1.0f, 0.4f, 0.4f, 1f));
environment.set(new ColorAttribute(ColorAttribute.Fog, 0.9f, 1f, 0f, 1f));
environment.add(new DirectionalLight().set(0.8f, 0.8f, 0.8f, -1f, -0.8f, -0.2f));
cubemap = new Cubemap(Gdx.files.internal("cubemap/pos-x.png"),
Gdx.files.internal("cubemap/neg-x.png"),
Gdx.files.internal("cubemap/pos-y.png"),
Gdx.files.internal("cubemap/neg-y.png"),
Gdx.files.internal("cubemap/pos-z.png"),
Gdx.files.internal("cubemap/neg-z.png"));
environment.set(new CubemapAttribute(CubemapAttribute.EnvironmentMap, cubemap));
cam = new PerspectiveCamera(67, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
cam.position.set(1f, 1f, 1f);
cam.lookAt(0,0,0);
cam.near = 0.1f;
cam.far = 300f;
cam.update();
ModelLoader loader = new ObjLoader();
model = loader.loadModel(Gdx.files.internal("earth/earth.obj"));
instance = new ModelInstance(model);
NodePart blockPart = model.nodes.get(0).parts.get(0);
renderable = new Renderable();
blockPart.setRenderable(renderable);
renderable.environment = environment;
renderable.worldTransform.idt();
renderContext = new RenderContext(new DefaultTextureBinder(DefaultTextureBinder.WEIGHTED, 1));
shader = new DefaultShader(renderable);
shader.init();
camController = new CameraInputController(cam);
Gdx.input.setInputProcessor(camController);
}
@Override
public void render(float delta) {
camController.update();
Gdx.gl.glViewport(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
Gdx.gl.glClearColor(0, 0, 0, 1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
renderContext.begin();
shader.begin(cam, renderContext);
shader.render(renderable);
shader.end();
renderContext.end();
}
But nothing happens; I only see the object.
What am I doing wrong?
After spending some time, I implemented a cubemap in LibGDX. Perhaps it's not an ideal solution, but there was nothing else available (at least I couldn't find anything), so I used native OpenGL ES functions together with LibGDX. My class is below:
public class EnvironmentCubemap implements Disposable{
protected final Pixmap[] data = new Pixmap[6];
protected ShaderProgram shader;
protected int u_worldTrans;
protected Mesh quad;
private Matrix4 worldTrans;
private Quaternion q;
protected String vertexShader = " attribute vec3 a_position; \n"+
" attribute vec3 a_normal; \n"+
" attribute vec2 a_texCoord0; \n"+
" uniform mat4 u_worldTrans; \n"+
" varying vec2 v_texCoord0; \n"+
" varying vec3 v_cubeMapUV; \n"+
" void main() { \n"+
" v_texCoord0 = a_texCoord0; \n"+
" vec4 g_position = u_worldTrans * vec4(a_position, 1.0); \n"+
" v_cubeMapUV = normalize(g_position.xyz); \n"+
" gl_Position = vec4(a_position, 1.0); \n"+
" } \n";
protected String fragmentShader = "#ifdef GL_ES \n"+
" precision mediump float; \n"+
" #endif \n"+
" uniform samplerCube u_environmentCubemap; \n"+
" varying vec2 v_texCoord0; \n"+
" varying vec3 v_cubeMapUV; \n"+
" void main() { \n"+
" gl_FragColor = vec4(textureCube(u_environmentCubemap, v_cubeMapUV).rgb, 1.0); \n"+
" } \n";
public String getDefaultVertexShader(){
return vertexShader;
}
public String getDefaultFragmentShader(){
return fragmentShader;
}
public EnvironmentCubemap (Pixmap positiveX, Pixmap negativeX, Pixmap positiveY, Pixmap negativeY, Pixmap positiveZ, Pixmap negativeZ) {
data[0]=positiveX;
data[1]=negativeX;
data[2]=positiveY;
data[3]=negativeY;
data[4]=positiveZ;
data[5]=negativeZ;
init();
}
public EnvironmentCubemap (FileHandle positiveX, FileHandle negativeX, FileHandle positiveY, FileHandle negativeY, FileHandle positiveZ, FileHandle negativeZ) {
this(new Pixmap(positiveX), new Pixmap(negativeX), new Pixmap(positiveY), new Pixmap(negativeY), new Pixmap(positiveZ), new Pixmap(negativeZ));
}
//IF ALL SIX SIDES ARE REPRESENTED IN ONE IMAGE
public EnvironmentCubemap (Pixmap cubemap) {
int w = cubemap.getWidth();
int h = cubemap.getHeight();
for(int i=0; i<6; i++) data[i] = new Pixmap(w/4, h/3, Format.RGB888);
for(int x=0; x<w; x++)
for(int y=0; y<h; y++){
//-X
if(x>=0 && x<=w/4 && y>=h/3 && y<=h*2/3) data[1].drawPixel(x, y-h/3, cubemap.getPixel(x, y));
//+Y
if(x>=w/4 && x<=w/2 && y>=0 && y<=h/3) data[2].drawPixel(x-w/4, y, cubemap.getPixel(x, y));
//+Z
if(x>=w/4 && x<=w/2 && y>=h/3 && y<=h*2/3) data[4].drawPixel(x-w/4, y-h/3, cubemap.getPixel(x, y));
//-Y
if(x>=w/4 && x<=w/2 && y>=h*2/3 && y<=h) data[3].drawPixel(x-w/4, y-h*2/3, cubemap.getPixel(x, y));
//+X
if(x>=w/2 && x<=w*3/4 && y>=h/3 && y<=h*2/3) data[0].drawPixel(x-w/2, y-h/3, cubemap.getPixel(x, y));
//-Z
if(x>=w*3/4 && x<=w && y>=h/3 && y<=h*2/3) data[5].drawPixel(x-w*3/4, y-h/3, cubemap.getPixel(x, y));
}
cubemap.dispose();
cubemap=null;
init();
}
private void init(){
shader = new ShaderProgram(vertexShader, fragmentShader);
if (!shader.isCompiled())
throw new GdxRuntimeException(shader.getLog());
u_worldTrans = shader.getUniformLocation("u_worldTrans");
quad = createQuad();
worldTrans = new Matrix4();
q = new Quaternion();
initCubemap();
}
private void initCubemap(){
//bind cubemap
Gdx.gl20.glBindTexture(GL20.GL_TEXTURE_CUBE_MAP, 0);
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_POSITIVE_X, 0, GL20.GL_RGB, data[0].getWidth(), data[0].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[0].getPixels());
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_NEGATIVE_X, 0, GL20.GL_RGB, data[1].getWidth(), data[1].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[1].getPixels());
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_POSITIVE_Y, 0, GL20.GL_RGB, data[2].getWidth(), data[2].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[2].getPixels());
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, 0, GL20.GL_RGB, data[3].getWidth(), data[3].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[3].getPixels());
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_POSITIVE_Z, 0, GL20.GL_RGB, data[4].getWidth(), data[4].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[4].getPixels());
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, 0, GL20.GL_RGB, data[5].getWidth(), data[5].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[5].getPixels());
//Gdx.gl20.glGenerateMipmap(GL20.GL_TEXTURE_CUBE_MAP);
//Gdx.gl20.glTexParameteri(GL20.GL_TEXTURE_CUBE_MAP, GL20.GL_TEXTURE_MIN_FILTER, GL20.GL_LINEAR);
Gdx.gl20.glTexParameteri ( GL20.GL_TEXTURE_CUBE_MAP, GL20.GL_TEXTURE_MIN_FILTER,GL20.GL_LINEAR_MIPMAP_LINEAR );
Gdx.gl20.glTexParameteri ( GL20.GL_TEXTURE_CUBE_MAP, GL20.GL_TEXTURE_MAG_FILTER,GL20.GL_LINEAR );
Gdx.gl20.glTexParameteri ( GL20.GL_TEXTURE_CUBE_MAP, GL20.GL_TEXTURE_WRAP_S, GL20.GL_CLAMP_TO_EDGE );
Gdx.gl20.glTexParameteri ( GL20.GL_TEXTURE_CUBE_MAP, GL20.GL_TEXTURE_WRAP_T, GL20.GL_CLAMP_TO_EDGE );
Gdx.gl20.glGenerateMipmap(GL20.GL_TEXTURE_CUBE_MAP);
}
public void render(Camera camera){
//SPECIAL THANKS TO Jos van Egmond
camera.view.getRotation( q, true );
q.conjugate();
///////////////////////////////////
worldTrans.idt();
worldTrans.rotate(q);
shader.begin();
shader.setUniformMatrix(u_worldTrans, worldTrans.translate(0, 0, -1));
quad.render(shader, GL20.GL_TRIANGLES);
shader.end();
}
public Mesh createQuad(){
Mesh mesh = new Mesh(true, 4, 6, VertexAttribute.Position(), VertexAttribute.ColorUnpacked(), VertexAttribute.TexCoords(0));
mesh.setVertices(new float[]
{-1f, -1f, 0, 1, 1, 1, 1, 0, 1,
1f, -1f, 0, 1, 1, 1, 1, 1, 1,
1f, 1f, 0, 1, 1, 1, 1, 1, 0,
-1f, 1f, 0, 1, 1, 1, 1, 0, 0});
mesh.setIndices(new short[] {0, 1, 2, 2, 3, 0});
return mesh;
}
@Override
public void dispose() {
shader.dispose();
quad.dispose();
for(int i=0; i<6; i++)
data[i].dispose();
}
}
How to use it? Just create an instance of it:
EnvironmentCubemap envCubemap = new EnvironmentCubemap(Gdx.files.internal("cubemap/pos-x.png"), Gdx.files.internal("cubemap/neg-x.png"),
Gdx.files.internal("cubemap/pos-y.jpg"), Gdx.files.internal("cubemap/neg-y.jpg"),
Gdx.files.internal("cubemap/pos-z.png"), Gdx.files.internal("cubemap/neg-z.png"));
or
EnvironmentCubemap envCubemap = new EnvironmentCubemap(new Pixmap(Gdx.files.internal("cubemap/all_in_one.jpg")));
and then use its render method:
envCubemap.render(camera);
I hope it helps someone else!
The default shader (the GLSL files) currently doesn't support a cubemap. You'll have to provide your own GLSL files to use a cubemap. The DefaultShader (the CPU part of the shader that is used by default) will bind the cubemap to the uniform called u_environmentCubemap. Also, the macro environmentCubemapFlag will be defined by the DefaultShader if the material contains an environment cubemap attribute. Use the following snippet in your shader to use the cubemap:
#ifdef environmentCubemapFlag
uniform samplerCube u_environmentCubemap;
#endif
Here's a relevant example snippet that uses a cubemap (and normal map): https://github.com/libgdx/libgdx/blob/master/tests/gdx-tests-android/assets/data/g3d/shaders/reflect.glsl
Here's a more advanced example snippet: https://github.com/libgdx/libgdx/blob/master/tests/gdx-tests-android/assets/data/g3d/shaders/test.glsl
You can specify your custom shader like this:
modelBatch = new ModelBatch(Gdx.files.internal("data/vertex.glsl"), Gdx.files.internal("data/fragment.glsl"));
More info about using a custom shader: http://blog.xoppa.com/creating-a-shader-with-libgdx/
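With a custom shader pair supplied that way, rendering then goes through ModelBatch as usual, for example (a minimal sketch using the fields from the question):
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
modelBatch.begin(cam);
modelBatch.render(instance, environment); // the environment carries the CubemapAttribute and fog color
modelBatch.end();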
I played around with the cubemap and created a class that doesn't use a native cubemap texture. Instead, I created 6 planes and placed them around the camera, so the camera is fixed inside these "walls". This implementation is a bit faster and simpler than the cubemap approach described above.
public class SkyBox implements Disposable{
Matrix4 tranformation;
ShaderProgram program;
int u_projTrans;
int u_worldTrans;
int u_tex;
Texture[] textures;
Mesh quad;
boolean invert = false;
protected String vertexShader =
" attribute vec4 a_position; "+
" attribute vec2 a_texCoord0; "+
" varying vec2 v_texCoord; "+
" uniform mat4 u_worldTrans; "+
" uniform mat4 u_projTrans; "+
" void main() "+
" { "+
" gl_Position = u_projTrans * u_worldTrans * vec4(a_position); "+
" v_texCoord = a_texCoord0; "+
" } ";
protected String fragmentShader =
" #ifdef GL_ES \n"+
" precision mediump float; \n"+
" #endif \n"+
" uniform sampler2D s_diffuse; "+
" varying vec2 v_texCoord; "+
" void main() "+
" { "+
" gl_FragColor = texture2D( s_diffuse, v_texCoord ); "+
" } ";
public String getDefaultVertexShader(){
return vertexShader;
}
public String getDefaultFragmentShader(){
return fragmentShader;
}
public SkyBox (Pixmap positiveX, Pixmap negativeX, Pixmap positiveY, Pixmap negativeY, Pixmap positiveZ, Pixmap negativeZ) {
textures = new Texture[6];
textures[3] = new Texture(positiveX);
textures[2] = new Texture(negativeX);
textures[4] = new Texture(positiveY);
textures[5] = new Texture(negativeY);
textures[0] = new Texture(positiveZ);
textures[1] = new Texture(negativeZ);
positiveX.dispose();
positiveX=null;
negativeX.dispose();
negativeX=null;
positiveY.dispose();
positiveY=null;
negativeY.dispose();
negativeY=null;
positiveZ.dispose();
positiveZ=null;
negativeZ.dispose();
negativeZ=null;
init();
}
public SkyBox (FileHandle positiveX, FileHandle negativeX, FileHandle positiveY, FileHandle negativeY, FileHandle positiveZ, FileHandle negativeZ) {
this(new Pixmap(positiveX), new Pixmap(negativeX), new Pixmap(positiveY), new Pixmap(negativeY), new Pixmap(positiveZ), new Pixmap(negativeZ));
}
public SkyBox (Pixmap cubemap) {
int w = cubemap.getWidth();
int h = cubemap.getHeight();
Pixmap[] data = new Pixmap[6];
for(int i=0; i<6; i++) data[i] = new Pixmap(w/4, h/3, Format.RGB888);
for(int x=0; x<w; x++)
for(int y=0; y<h; y++){
//-X
if(x>=0 && x<=w/4 && y>=h/3 && y<=h*2/3) data[1].drawPixel(x, y-h/3, cubemap.getPixel(x, y));
//+Y
if(x>=w/4 && x<=w/2+1 && y>=0 && y<=h/3) data[2].drawPixel(x-w/4, y, cubemap.getPixel(x, y));
//+Z
if(x>=w/4 && x<=w/2 && y>=h/3 && y<=h*2/3) data[4].drawPixel(x-w/4, y-h/3, cubemap.getPixel(x, y));
//-Y
if(x>=w/4 && x<=w/2 && y>=h*2/3 && y<=h) data[3].drawPixel(x-w/4, y-h*2/3, cubemap.getPixel(x, y));
//+X
if(x>=w/2 && x<=w*3/4 && y>=h/3 && y<=h*2/3) data[0].drawPixel(x-w/2, y-h/3, cubemap.getPixel(x, y));
//-Z
if(x>=w*3/4 && x<=w && y>=h/3 && y<=h*2/3) data[5].drawPixel(x-w*3/4, y-h/3, cubemap.getPixel(x, y));
}
textures = new Texture[6];
textures[0] = new Texture(data[4]);
textures[1] = new Texture(data[5]);
textures[2] = new Texture(data[1]);
textures[3] = new Texture(data[0]);
textures[4] = new Texture(data[2]);
textures[5] = new Texture(data[3]);
for(int i=0; i<6; i++) {
data[i].dispose();
data[i] = null;
}
cubemap.dispose();
cubemap=null;
init();
}
public SkyBox (FileHandle cubemap){
this(new Pixmap(cubemap));
}
public Mesh createTexturedQuad(){
Mesh quad = new Mesh(true, 4, 6, VertexAttribute.Position(), new VertexAttribute(Usage.TextureCoordinates, 2, "a_texCoord0"));
quad.setVertices(new float[]
{-1f, -1f, 0, 0, 1,
1f, -1f, 0, 1, 1,
1f, 1f, 0, 1, 0,
-1f, 1f, 0, 0, 0});
quad.setIndices(new short[] {0, 1, 2, 2, 3, 0});
return quad;
}
public void setInvert(boolean enable){
invert = enable;
}
public void init() {
program = new ShaderProgram(vertexShader, fragmentShader);
if (!program.isCompiled())
throw new GdxRuntimeException(program.getLog());
else Gdx.app.log("shader", "shader compiled successfully!");
u_projTrans = program.getUniformLocation("u_projTrans");
u_worldTrans = program.getUniformLocation("u_worldTrans");
u_tex = program.getUniformLocation("s_diffuse");
tranformation = new Matrix4();
quad = createTexturedQuad();
}
public void render(Camera camera){
Gdx.graphics.getGL20().glCullFace(GL20.GL_BACK);
program.begin();
program.setUniformMatrix(u_projTrans, camera.combined);
//front
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[0].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
//left
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.rotate(Vector3.Y, 90);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[ invert ? 3 : 2].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
//right
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.rotate(Vector3.Y, -90);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[invert ? 2 : 3].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
//bottom
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.rotate(Vector3.X, -90);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[5].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
//top
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.rotate(Vector3.X, 90);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[4].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
//back
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.rotate(Vector3.Y, 180);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[1].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
program.end();
}
@Override
public void dispose() {
program.dispose();
quad.dispose();
for(int i=0; i<6; i++){
textures[i].dispose();
textures[i]=null;
}
}
}
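For example, using the constructors and render method defined above (the single-image constructor expects the same 4x3 cross layout as the EnvironmentCubemap class):
// build the skybox once, e.g. in show()/create()
SkyBox skybox = new SkyBox(Gdx.files.internal("cubemap/all_in_one.jpg"));
// draw it each frame before the rest of the scene
skybox.render(cam);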
Usage of this class is the same as the previous one. Happy coding!
In addition to Nolesh's solution, the skybox can be rotated correctly with respect to the camera rotation like so:
Quaternion q = new Quaternion();
camera.view.getRotation( q, true );
q.conjugate();
envCubemap.render( q );

Creating texture atlas error (OpenGL ES 2.0, Android)

I am trying to create a texture atlas with OpenGL ES 2.0. I want to pack many little images into one big image. It works fine in the emulator, but it doesn't work on the device.
Here is my code:
gl.glGenTextures(1, textureID, 0);
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureID[0]);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bmp, 0);
Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), R.drawable.block);
ByteBuffer pixels = ByteBuffer.allocate(bitmap.getWidth() * bitmap.getHeight() * 4);
bitmap.copyPixelsToBuffer(pixels);
gl.glTexSubImage2D(GL10.GL_TEXTURE_2D, 0, 0, 0, bitmap.getWidth(), bitmap.getHeight(), GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, pixels);
The first Bitmap (bmp) is a 256x256 red image, and the second Bitmap (bitmap) is a 16x16 white image. In the emulator I see the red rectangle with the little white image in it, but on the device I can only see the big red rectangle.
You are using power-of-two textures, which is great, as most implementations of OpenGL ES 2.0 do not support NPOT textures. Now, since you were able to see the textures on the emulator, it seems that you are not transforming the object (to be rendered) using the projection (MVP) matrix. This example shows how to do that:
public class GLES20Renderer implements Renderer {
private int _planeProgram;
private int _planeAPositionLocation;
private int _planeACoordinateLocation;
private int _planeUMVPLocation;
private int _planeUSamplerLocation;
private FloatBuffer _planeVFB;
private FloatBuffer _planeTFB;
private ShortBuffer _planeISB;
private float[] _ViewMatrix = new float[16];
private float[] _ProjectionMatrix = new float[16];
private float[] _MVPMatrix = new float[16];
private int _textureId;
public Context _context;
private static float _zAngle; // referenced by setZAngle/getZAngle below
public GLES20Renderer(Context context) {
_context = context;
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
initplane();
float ratio = (float) width / height;
float zNear = 0.1f;
float zFar = 1000;
float fov = 0.95f; // 0.2 to 1.0
float size = (float) (zNear * Math.tan(fov / 2));
Matrix.setLookAtM(_ViewMatrix, 0, 0, 0, 50, 0, 0, 0, 0, 1, 0);
Matrix.frustumM(_ProjectionMatrix, 0, -size, size, -size / ratio, size / ratio, zNear, zFar);
_planeProgram = loadProgram(_planeVertexShaderCode, _planeFragmentShaderCode);
_planeAPositionLocation = GLES20.glGetAttribLocation(_planeProgram, "aPosition");
_planeACoordinateLocation = GLES20.glGetAttribLocation(_planeProgram, "aCoord");
_planeUMVPLocation = GLES20.glGetUniformLocation(_planeProgram, "uMVP");
_planeUSamplerLocation = GLES20.glGetUniformLocation(_planeProgram, "uSampler");
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
_textureId = textures[0];
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _textureId);
InputStream is1 = _context.getResources().openRawResource(R.drawable.brick);
Bitmap img1;
try {
img1 = BitmapFactory.decodeStream(is1);
} finally {
try {
is1.close();
} catch(IOException e) {
//e.printStackTrace();
}
}
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); // GL_LINEAR
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img1, 0);
}
public void onDrawFrame(GL10 gl) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
Matrix.multiplyMM(_MVPMatrix, 0, _ProjectionMatrix, 0, _ViewMatrix, 0);
GLES20.glUseProgram(_planeProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _textureId);
GLES20.glUniform1i(_planeUSamplerLocation, 0);
GLES20.glUniformMatrix4fv(_planeUMVPLocation, 1, false, _MVPMatrix, 0);
GLES20.glVertexAttribPointer(_planeAPositionLocation, 3, GLES20.GL_FLOAT, false, 12, _planeVFB);
GLES20.glEnableVertexAttribArray(_planeAPositionLocation);
GLES20.glVertexAttribPointer(_planeACoordinateLocation, 2, GLES20.GL_FLOAT, false, 8, _planeTFB);
GLES20.glEnableVertexAttribArray(_planeACoordinateLocation);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_SHORT, _planeISB);
System.gc();
}
public static void setZAngle(float angle) {
GLES20Renderer._zAngle = angle;
}
public static float getZAngle() {
return GLES20Renderer._zAngle;
}
private void initplane() {
float[] planeVFA = {
10.000000f,-10.000000f,0.000000f,
-10.000000f,-10.000000f,0.000000f,
10.000000f,10.000000f,0.000000f,
-10.000000f,10.000000f,0.000000f,
};
float[] planeTFA = {
// 1,0, 0,0, 1,1, 0,1
1,1, 0,1, 1,0, 0,0
};
short[] planeISA = {
2,3,1,
0,2,1,
};
ByteBuffer planeVBB = ByteBuffer.allocateDirect(planeVFA.length * 4);
planeVBB.order(ByteOrder.nativeOrder());
_planeVFB = planeVBB.asFloatBuffer();
_planeVFB.put(planeVFA);
_planeVFB.position(0);
ByteBuffer planeTBB = ByteBuffer.allocateDirect(planeTFA.length * 4);
planeTBB.order(ByteOrder.nativeOrder());
_planeTFB = planeTBB.asFloatBuffer();
_planeTFB.put(planeTFA);
_planeTFB.position(0);
ByteBuffer planeIBB = ByteBuffer.allocateDirect(planeISA.length * 2);
planeIBB.order(ByteOrder.nativeOrder());
_planeISB = planeIBB.asShortBuffer();
_planeISB.put(planeISA);
_planeISB.position(0);
}
private int loadShader(int type, String source) {
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
return shader;
}
private int loadProgram(String vertexShaderCode, String fragmentShaderCode) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
int program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
GLES20.glLinkProgram(program);
return program;
}
private final String _planeVertexShaderCode =
"attribute vec4 aPosition; \n"
+ "attribute vec2 aCoord; \n"
+ "varying vec2 vCoord; \n"
+ "uniform mat4 uMVP; \n"
+ "void main() { \n"
+ " gl_Position = uMVP * aPosition; \n"
+ " vCoord = aCoord; \n"
+ "} \n";
private final String _planeFragmentShaderCode =
"#ifdef GL_FRAGMENT_PRECISION_HIGH \n"
+ "precision highp float; \n"
+ "#else \n"
+ "precision mediump float; \n"
+ "#endif \n"
+ "varying vec2 vCoord; \n"
+ "uniform sampler2D uSampler; \n"
+ "void main() { \n"
+ " gl_FragColor = texture2D(uSampler,vCoord); \n"
+ "} \n";
}
More examples like these at http://www.apress.com/9781430250531
If your code is really OpenGL ES 2.0, you should not be using the GL10 wrapper class. I think ES 1.0 did not support non-power-of-two texture sizes without using extensions, but I guess you have handled that? You may have found a bug in the OpenGL ES drivers for your device, since it works on the AVD emulator. Try a different device with a different type of GPU. This article will help:
http://software.intel.com/en-us/articles/porting-opengl-games-to-android-on-intel-atom-processors-part-1
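One more thing worth checking, though it is only a guess about the cause rather than something the answers above confirm: Bitmap.copyPixelsToBuffer() leaves the buffer position at the end of the buffer, and that alone can make the sub-image upload read the wrong data; devices also tend to be less forgiving than the emulator about non-direct buffers. Rewinding a direct buffer before glTexSubImage2D is the safer pattern, sketched here with the GLES20 API:
// upload the 16x16 tile into the already-bound 256x256 atlas texture
Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), R.drawable.block);
ByteBuffer pixels = ByteBuffer.allocateDirect(bitmap.getWidth() * bitmap.getHeight() * 4)
        .order(ByteOrder.nativeOrder());
bitmap.copyPixelsToBuffer(pixels);
pixels.position(0); // rewind before handing the buffer to OpenGL
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0,
        bitmap.getWidth(), bitmap.getHeight(),
        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
// alternatively, GLUtils.texSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, bitmap) avoids manual buffer handling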
