LibGDX 0.9.9 - Apply cubemap in environment - java

I am using LibGDX 0.9.9. I am trying to render a cubemap together with fog. Here is my code snippet:
public void show() {
modelBatch = new ModelBatch();
environment = new Environment();
environment.set(new ColorAttribute(ColorAttribute.AmbientLight, 1.0f, 0.4f, 0.4f, 1f));
environment.set(new ColorAttribute(ColorAttribute.Fog, 0.9f, 1f, 0f, 1f));
environment.add(new DirectionalLight().set(0.8f, 0.8f, 0.8f, -1f, -0.8f, -0.2f));
cubemap = new Cubemap(Gdx.files.internal("cubemap/pos-x.png"),
Gdx.files.internal("cubemap/neg-x.png"),
Gdx.files.internal("cubemap/pos-y.png"),
Gdx.files.internal("cubemap/neg-y.png"),
Gdx.files.internal("cubemap/pos-z.png"),
Gdx.files.internal("cubemap/neg-z.png"));
environment.set(new CubemapAttribute(CubemapAttribute.EnvironmentMap, cubemap));
cam = new PerspectiveCamera(67, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
cam.position.set(1f, 1f, 1f);
cam.lookAt(0,0,0);
cam.near = 0.1f;
cam.far = 300f;
cam.update();
ModelLoader loader = new ObjLoader();
model = loader.loadModel(Gdx.files.internal("earth/earth.obj"));
instance = new ModelInstance(model);
NodePart blockPart = model.nodes.get(0).parts.get(0);
renderable = new Renderable();
blockPart.setRenderable(renderable);
renderable.environment = environment;
renderable.worldTransform.idt();
renderContext = new RenderContext(new DefaultTextureBinder(DefaultTextureBinder.WEIGHTED, 1));
shader = new DefaultShader(renderable);
shader.init();
camController = new CameraInputController(cam);
Gdx.input.setInputProcessor(camController);
}
@Override
public void render(float delta) {
camController.update();
Gdx.gl.glViewport(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
Gdx.gl.glClearColor(0, 0, 0, 1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
renderContext.begin();
shader.begin(cam, renderContext);
shader.render(renderable);
shader.end();
renderContext.end();
}
But nothing happens; I only see the object, without the cubemap.
What am I doing wrong?

After spending some time on it, I implemented a cubemap in LibGDX myself. It may not be an ideal solution, but I could not find anything better. I used native OpenGL ES functions together with LibGDX. My class is below:
public class EnvironmentCubemap implements Disposable{
protected final Pixmap[] data = new Pixmap[6];
protected ShaderProgram shader;
protected int u_worldTrans;
protected Mesh quad;
private Matrix4 worldTrans;
private Quaternion q;
protected String vertexShader = " attribute vec3 a_position; \n"+
" attribute vec3 a_normal; \n"+
" attribute vec2 a_texCoord0; \n"+
" uniform mat4 u_worldTrans; \n"+
" varying vec2 v_texCoord0; \n"+
" varying vec3 v_cubeMapUV; \n"+
" void main() { \n"+
" v_texCoord0 = a_texCoord0; \n"+
" vec4 g_position = u_worldTrans * vec4(a_position, 1.0); \n"+
" v_cubeMapUV = normalize(g_position.xyz); \n"+
" gl_Position = vec4(a_position, 1.0); \n"+
" } \n";
protected String fragmentShader = "#ifdef GL_ES \n"+
" precision mediump float; \n"+
" #endif \n"+
" uniform samplerCube u_environmentCubemap; \n"+
" varying vec2 v_texCoord0; \n"+
" varying vec3 v_cubeMapUV; \n"+
" void main() { \n"+
" gl_FragColor = vec4(textureCube(u_environmentCubemap, v_cubeMapUV).rgb, 1.0); \n"+
" } \n";
public String getDefaultVertexShader(){
return vertexShader;
}
public String getDefaultFragmentShader(){
return fragmentShader;
}
public EnvironmentCubemap (Pixmap positiveX, Pixmap negativeX, Pixmap positiveY, Pixmap negativeY, Pixmap positiveZ, Pixmap negativeZ) {
data[0]=positiveX;
data[1]=negativeX;
data[2]=positiveY;
data[3]=negativeY;
data[4]=positiveZ;
data[5]=negativeZ;
init();
}
public EnvironmentCubemap (FileHandle positiveX, FileHandle negativeX, FileHandle positiveY, FileHandle negativeY, FileHandle positiveZ, FileHandle negativeZ) {
this(new Pixmap(positiveX), new Pixmap(negativeX), new Pixmap(positiveY), new Pixmap(negativeY), new Pixmap(positiveZ), new Pixmap(negativeZ));
}
//IF ALL SIX SIDES ARE REPRESENTED IN ONE IMAGE
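//(the source image is assumed to be a standard 4x3 horizontal-cross layout, so each face is w/4 x h/3 pixels)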
public EnvironmentCubemap (Pixmap cubemap) {
int w = cubemap.getWidth();
int h = cubemap.getHeight();
for(int i=0; i<6; i++) data[i] = new Pixmap(w/4, h/3, Format.RGB888);
for(int x=0; x<w; x++)
for(int y=0; y<h; y++){
//-X
if(x>=0 && x<=w/4 && y>=h/3 && y<=h*2/3) data[1].drawPixel(x, y-h/3, cubemap.getPixel(x, y));
//+Y
if(x>=w/4 && x<=w/2 && y>=0 && y<=h/3) data[2].drawPixel(x-w/4, y, cubemap.getPixel(x, y));
//+Z
if(x>=w/4 && x<=w/2 && y>=h/3 && y<=h*2/3) data[4].drawPixel(x-w/4, y-h/3, cubemap.getPixel(x, y));
//-Y
if(x>=w/4 && x<=w/2 && y>=h*2/3 && y<=h) data[3].drawPixel(x-w/4, y-h*2/3, cubemap.getPixel(x, y));
//+X
if(x>=w/2 && x<=w*3/4 && y>=h/3 && y<=h*2/3) data[0].drawPixel(x-w/2, y-h/3, cubemap.getPixel(x, y));
//-Z
if(x>=w*3/4 && x<=w && y>=h/3 && y<=h*2/3) data[5].drawPixel(x-w*3/4, y-h/3, cubemap.getPixel(x, y));
}
cubemap.dispose();
cubemap=null;
init();
}
private void init(){
shader = new ShaderProgram(vertexShader, fragmentShader);
if (!shader.isCompiled())
throw new GdxRuntimeException(shader.getLog());
u_worldTrans = shader.getUniformLocation("u_worldTrans");
quad = createQuad();
worldTrans = new Matrix4();
q = new Quaternion();
initCubemap();
}
private void initCubemap(){
//bind cubemap
Gdx.gl20.glBindTexture(GL20.GL_TEXTURE_CUBE_MAP, 0);
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_POSITIVE_X, 0, GL20.GL_RGB, data[0].getWidth(), data[0].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[0].getPixels());
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_NEGATIVE_X, 0, GL20.GL_RGB, data[1].getWidth(), data[1].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[1].getPixels());
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_POSITIVE_Y, 0, GL20.GL_RGB, data[2].getWidth(), data[2].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[2].getPixels());
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, 0, GL20.GL_RGB, data[3].getWidth(), data[3].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[3].getPixels());
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_POSITIVE_Z, 0, GL20.GL_RGB, data[4].getWidth(), data[4].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[4].getPixels());
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, 0, GL20.GL_RGB, data[5].getWidth(), data[5].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[5].getPixels());
//Gdx.gl20.glGenerateMipmap(GL20.GL_TEXTURE_CUBE_MAP);
//Gdx.gl20.glTexParameteri(GL20.GL_TEXTURE_CUBE_MAP, GL20.GL_TEXTURE_MIN_FILTER, GL20.GL_LINEAR);
Gdx.gl20.glTexParameteri(GL20.GL_TEXTURE_CUBE_MAP, GL20.GL_TEXTURE_MIN_FILTER, GL20.GL_LINEAR_MIPMAP_LINEAR);
Gdx.gl20.glTexParameteri(GL20.GL_TEXTURE_CUBE_MAP, GL20.GL_TEXTURE_MAG_FILTER, GL20.GL_LINEAR);
Gdx.gl20.glTexParameteri(GL20.GL_TEXTURE_CUBE_MAP, GL20.GL_TEXTURE_WRAP_S, GL20.GL_CLAMP_TO_EDGE);
Gdx.gl20.glTexParameteri(GL20.GL_TEXTURE_CUBE_MAP, GL20.GL_TEXTURE_WRAP_T, GL20.GL_CLAMP_TO_EDGE);
Gdx.gl20.glGenerateMipmap(GL20.GL_TEXTURE_CUBE_MAP);
}
public void render(Camera camera){
//SPECIAL THANKS TO Jos van Egmond
camera.view.getRotation( q, true );
q.conjugate();
///////////////////////////////////
worldTrans.idt();
worldTrans.rotate(q);
shader.begin();
shader.setUniformMatrix(u_worldTrans, worldTrans.translate(0, 0, -1));
quad.render(shader, GL20.GL_TRIANGLES);
shader.end();
}
public Mesh createQuad(){
Mesh mesh = new Mesh(true, 4, 6, VertexAttribute.Position(), VertexAttribute.ColorUnpacked(), VertexAttribute.TexCoords(0));
mesh.setVertices(new float[]
{-1f, -1f, 0, 1, 1, 1, 1, 0, 1,
1f, -1f, 0, 1, 1, 1, 1, 1, 1,
1f, 1f, 0, 1, 1, 1, 1, 1, 0,
-1f, 1f, 0, 1, 1, 1, 1, 0, 0});
mesh.setIndices(new short[] {0, 1, 2, 2, 3, 0});
return mesh;
}
@Override
public void dispose() {
shader.dispose();
quad.dispose();
for(int i=0; i<6; i++)
data[i].dispose();
}
}
How to use it? Just create an instance of it:
EnvironmentCubemap envCubemap = new EnvironmentCubemap(Gdx.files.internal("cubemap/pos-x.png"), Gdx.files.internal("cubemap/neg-x.png"),
Gdx.files.internal("cubemap/pos-y.jpg"), Gdx.files.internal("cubemap/neg-y.jpg"),
Gdx.files.internal("cubemap/pos-z.png"), Gdx.files.internal("cubemap/neg-z.png"));
or
EnvironmentCubemap envCubemap = new EnvironmentCubemap(new Pixmap(Gdx.files.internal("cubemap/all_in_one.jpg")));
and then use its render method:
envCubemap.render(camera);
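For context, here is a minimal sketch of where that call could sit in a render() method, assuming the ModelBatch scene from the original question (modelBatch, cam, instance, environment); drawing the skybox first and the models on top is my suggestion:
@Override
public void render(float delta) {
    Gdx.gl.glViewport(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
    Gdx.gl.glClearColor(0, 0, 0, 1);
    Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
    envCubemap.render(cam);            //draw the background first
    modelBatch.begin(cam);             //then draw the scene on top of it
    modelBatch.render(instance, environment);
    modelBatch.end();
}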
I hope it helps someone else!

The default shader (the GLSL files) currently doesn't support a cubemap, so you'll have to provide your own GLSL files to use one. The DefaultShader (the CPU-side part of the shader that is used by default) will bind the cubemap to the uniform called u_environmentCubemap. Also, the macro environmentCubemapFlag will be defined by the DefaultShader if the material contains an environment cubemap attribute. Use the following snippet in your shader to access the cubemap:
#ifdef environmentCubemapFlag
uniform samplerCube u_environmentCubemap;
#endif
Here's a relevant example shader that uses a cubemap (and a normal map): https://github.com/libgdx/libgdx/blob/master/tests/gdx-tests-android/assets/data/g3d/shaders/reflect.glsl
Here's a more advanced example shader: https://github.com/libgdx/libgdx/blob/master/tests/gdx-tests-android/assets/data/g3d/shaders/test.glsl
You can specify your custom shader like this:
modelBatch = new ModelBatch(Gdx.files.internal("data/vertex.glsl"), Gdx.files.internal("data/fragment.glsl"));
More info about using a custom shader: http://blog.xoppa.com/creating-a-shader-with-libgdx/

I played around with the cubemap and created a class that doesn't use a native cubemap texture at all. Instead, I created six textured planes and placed them around the camera, so the camera is fixed inside these "walls". This implementation is a bit faster and simpler than the cubemap approach described above.
public class SkyBox implements Disposable{
Matrix4 tranformation;
ShaderProgram program;
int u_projTrans;
int u_worldTrans;
int u_tex;
Texture[] textures;
Mesh quad;
boolean invert = false;
protected String vertexShader =
" attribute vec4 a_position; "+
" attribute vec2 a_texCoord0; "+
" varying vec2 v_texCoord; "+
" uniform mat4 u_worldTrans; "+
" uniform mat4 u_projTrans; "+
" void main() "+
" { "+
" gl_Position = u_projTrans * u_worldTrans * vec4(a_position); "+
" v_texCoord = a_texCoord0; "+
" } ";
protected String fragmentShader =
" #ifdef GL_ES \n"+
" precision mediump float; \n"+
" #endif \n"+
" uniform sampler2D s_diffuse; "+
" varying vec2 v_texCoord; "+
" void main() "+
" { "+
" gl_FragColor = texture2D( s_diffuse, v_texCoord ); "+
" } ";
public String getDefaultVertexShader(){
return vertexShader;
}
public String getDefaultFragmentShader(){
return fragmentShader;
}
public SkyBox (Pixmap positiveX, Pixmap negativeX, Pixmap positiveY, Pixmap negativeY, Pixmap positiveZ, Pixmap negativeZ) {
textures = new Texture[6];
textures[3] = new Texture(positiveX);
textures[2] = new Texture(negativeX);
textures[4] = new Texture(positiveY);
textures[5] = new Texture(negativeY);
textures[0] = new Texture(positiveZ);
textures[1] = new Texture(negativeZ);
positiveX.dispose();
positiveX=null;
negativeX.dispose();
negativeX=null;
positiveY.dispose();
positiveY=null;
negativeY.dispose();
negativeY=null;
positiveZ.dispose();
positiveZ=null;
negativeZ.dispose();
negativeZ=null;
init();
}
public SkyBox (FileHandle positiveX, FileHandle negativeX, FileHandle positiveY, FileHandle negativeY, FileHandle positiveZ, FileHandle negativeZ) {
this(new Pixmap(positiveX), new Pixmap(negativeX), new Pixmap(positiveY), new Pixmap(negativeY), new Pixmap(positiveZ), new Pixmap(negativeZ));
}
public SkyBox (Pixmap cubemap) {
int w = cubemap.getWidth();
int h = cubemap.getHeight();
Pixmap[] data = new Pixmap[6];
for(int i=0; i<6; i++) data[i] = new Pixmap(w/4, h/3, Format.RGB888);
for(int x=0; x<w; x++)
for(int y=0; y<h; y++){
//-X
if(x>=0 && x<=w/4 && y>=h/3 && y<=h*2/3) data[1].drawPixel(x, y-h/3, cubemap.getPixel(x, y));
//+Y
if(x>=w/4 && x<=w/2+1 && y>=0 && y<=h/3) data[2].drawPixel(x-w/4, y, cubemap.getPixel(x, y));
//+Z
if(x>=w/4 && x<=w/2 && y>=h/3 && y<=h*2/3) data[4].drawPixel(x-w/4, y-h/3, cubemap.getPixel(x, y));
//-Y
if(x>=w/4 && x<=w/2 && y>=h*2/3 && y<=h) data[3].drawPixel(x-w/4, y-h*2/3, cubemap.getPixel(x, y));
//+X
if(x>=w/2 && x<=w*3/4 && y>=h/3 && y<=h*2/3) data[0].drawPixel(x-w/2, y-h/3, cubemap.getPixel(x, y));
//-Z
if(x>=w*3/4 && x<=w && y>=h/3 && y<=h*2/3) data[5].drawPixel(x-w*3/4, y-h/3, cubemap.getPixel(x, y));
}
textures = new Texture[6];
textures[0] = new Texture(data[4]);
textures[1] = new Texture(data[5]);
textures[2] = new Texture(data[1]);
textures[3] = new Texture(data[0]);
textures[4] = new Texture(data[2]);
textures[5] = new Texture(data[3]);
for(int i=0; i<6; i++) {
data[i].dispose();
data[i] = null;
}
cubemap.dispose();
cubemap=null;
init();
}
public SkyBox (FileHandle cubemap){
this(new Pixmap(cubemap));
}
public Mesh createTexturedQuad(){
Mesh quad = new Mesh(true, 4, 6, VertexAttribute.Position(), new VertexAttribute(Usage.TextureCoordinates, 2, "a_texCoord0"));
quad.setVertices(new float[]
{-1f, -1f, 0, 0, 1,
1f, -1f, 0, 1, 1,
1f, 1f, 0, 1, 0,
-1f, 1f, 0, 0, 0});
quad.setIndices(new short[] {0, 1, 2, 2, 3, 0});
return quad;
}
public void setInvert(boolean enable){
invert = enable;
}
public void init() {
program = new ShaderProgram(vertexShader, fragmentShader);
if (!program.isCompiled())
throw new GdxRuntimeException(program.getLog());
else Gdx.app.log("shader", "shader compiled successfully!");
u_projTrans = program.getUniformLocation("u_projTrans");
u_worldTrans = program.getUniformLocation("u_worldTrans");
u_tex = program.getUniformLocation("s_diffuse");
tranformation = new Matrix4();
quad = createTexturedQuad();
}
public void render(Camera camera){
Gdx.graphics.getGL20().glCullFace(GL20.GL_BACK);
program.begin();
program.setUniformMatrix(u_projTrans, camera.combined);
//front
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[0].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
//left
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.rotate(Vector3.Y, 90);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[ invert ? 3 : 2].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
//right
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.rotate(Vector3.Y, -90);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[invert ? 2 : 3].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
//bottom
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.rotate(Vector3.X, -90);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[5].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
//top
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.rotate(Vector3.X, 90);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[4].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
//back
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.rotate(Vector3.Y, 180);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[1].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
program.end();
}
@Override
public void dispose() {
program.dispose();
quad.dispose();
for(int i=0; i<6; i++){
textures[i].dispose();
textures[i]=null;
}
}
}
Usage of this class is the same as for the previous one. Happy coding!
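For example (a minimal sketch mirroring the EnvironmentCubemap usage above; the file name is a placeholder):
SkyBox skybox = new SkyBox(Gdx.files.internal("cubemap/all_in_one.jpg"));
//...then, each frame, after clearing the screen and before drawing the models:
skybox.render(camera);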

In addition to Nolesh's solution, the skybox can be rotated correctly with respect to the camera rotation like so:
Quaternion q = new Quaternion();
camera.view.getRotation( q, true );
q.conjugate();
envCubemap.render( q );
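Note that neither class above declares a render(Quaternion) overload, so this call assumes a small addition. A minimal sketch of such an overload for EnvironmentCubemap, reusing its existing fields (this is my guess at what was meant, not code from the answer):
public void render(Quaternion q) {
    //same as render(Camera), but with the rotation supplied by the caller
    worldTrans.idt();
    worldTrans.rotate(q);
    shader.begin();
    shader.setUniformMatrix(u_worldTrans, worldTrans.translate(0, 0, -1));
    quad.render(shader, GL20.GL_TRIANGLES);
    shader.end();
}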

Related

JOGL shadow mapping

I am translating OpenGL SuperBible demos into Java using JOGL. All the demos up until chapter 12 went well, but now I am stuck at shadow mapping. Here is my setup:
public class ShadowMap implements GLEventListener
{
public static void main(String[] args)
{
ShadowMap rend = new ShadowMap();
window.NEWTWindowStarter.init(rend, null, null);
}
private int light_program, camera_program;
private int[] obj, bkg;
private int camera_mv_pointer, camera_proj_pointer, light_mvp_pointer, camera_shadow_pointer;
private int fbo, depth_tex;
private int screen_width, screen_height;
private float aspect;
private double[] light_position, camera_position;
private double[][] light_proj_matrix, light_view_matrix;
private double[][] camera_proj_matrix, camera_view_matrix;
private double[][] bias_matrix, shadow_matrix;
private double[][] obj_model_matrix, bkg_model_matrix;
@Override
public void init(GLAutoDrawable glAutoDrawable)
{
GL4 gl = glAutoDrawable.getGL().getGL4bc();
ObjectParser parser = new ObjectParser("pawn_s.obj");
obj = BufferController.prepareVAO(gl, parser.getDataHolder());
parser = new ObjectParser("bkg.obj");
bkg = BufferController.prepareVAO(gl, parser.getDataHolder());
light_program = ShaderController.init(gl, "shaders/demo/d25/light_v.glsl", "shaders/demo/d25/light_f.glsl");
light_mvp_pointer = gl.glGetUniformLocation(light_program, "mvp");
camera_program = ShaderController.init(gl, "shaders/demo/d25/camera_v.glsl", "shaders/demo/d25/camera_f.glsl");
camera_mv_pointer = gl.glGetUniformLocation(camera_program, "mv_matrix");
camera_proj_pointer = gl.glGetUniformLocation(camera_program, "proj_matrix");
camera_shadow_pointer = gl.glGetUniformLocation(camera_program, "shadow_matrix");
depth_tex = BufferController.generateTextureId(gl);
gl.glBindTexture(gl.GL_TEXTURE_2D, depth_tex);
gl.glTexStorage2D(gl.GL_TEXTURE_2D, 11, gl.GL_DEPTH_COMPONENT32F, 1024, 1024);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_LINEAR);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP_TO_BORDER);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP_TO_BORDER);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_COMPARE_MODE, gl.GL_COMPARE_REF_TO_TEXTURE);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_COMPARE_FUNC, gl.GL_LEQUAL);
gl.glActiveTexture(gl.GL_TEXTURE0);
gl.glBindTexture(gl.GL_TEXTURE_2D, depth_tex);
fbo = BufferController.generateFrameId(gl);
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, fbo);
gl.glFramebufferTexture(gl.GL_FRAMEBUFFER, gl.GL_DEPTH_ATTACHMENT, depth_tex, 0);
gl.glDrawBuffer(gl.GL_NONE);
gl.glReadBuffer(gl.GL_NONE);
if(gl.glCheckFramebufferStatus(gl.GL_FRAMEBUFFER) == gl.GL_FRAMEBUFFER_COMPLETE)
System.out.println("FrameBuffer OK!");
else
System.out.println("FrameBuffer FAIL!");
gl.glBindTexture(gl.GL_TEXTURE_2D, 0);
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0);
gl.glEnable(gl.GL_DEPTH_TEST);
}
@Override
public void dispose(GLAutoDrawable glAutoDrawable){}
@Override
public void display(GLAutoDrawable glAutoDrawable)
{
GL4bc gl = glAutoDrawable.getGL().getGL4bc();
computerMatrices();
gl.glEnable(gl.GL_DEPTH_TEST);
// depth pass
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, fbo);
gl.glViewport(0, 0, 1024, 1024);
gl.glUseProgram(light_program);
drawFromLight(gl, obj, obj_model_matrix);
drawFromLight(gl, bkg, bkg_model_matrix);
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0);
// final pass
gl.glViewport(0, 0, screen_width, screen_height);
gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT);
gl.glBindTexture(gl.GL_TEXTURE_2D, depth_tex);
gl.glDrawBuffer(gl.GL_BACK);
gl.glUseProgram(camera_program);
drawFromCamera(gl, obj, obj_model_matrix);
drawFromCamera(gl, bkg, bkg_model_matrix);
gl.glBindTexture(gl.GL_TEXTURE_2D, 0);
}
private void computerMatrices()
{
bkg_model_matrix = Matrix.model(0, 0, 0, 0, 0, 0, 1);
obj_model_matrix = Matrix.model(0, 0, 0, 0, 0, 0, 1);
light_position = new double[]{10, 10, 10};
camera_position = new double[]{10, 0, 10};
light_proj_matrix = Matrix.frustum(-1.0, 1.0, -1.0, 1.0, 1.0, 200.0);
light_view_matrix = Matrix.lookAt(light_position, new double[]{0, 0, 0}, new double[]{0, 1, 0});
camera_proj_matrix = Matrix.perspective(60, aspect, 0.1f, 1000f);
camera_view_matrix = Matrix.lookAt(camera_position, new double[]{0, 0, 0}, new double[]{0, 1, 0});
bias_matrix = new double[][]{
{0.5, 0.0, 0.0, 0.0},
{0.0, 0.5, 0.0, 0.0},
{0.0, 0.0, 0.5, 0.0},
{0.5, 0.5, 0.5, 1.0}};
shadow_matrix = Matrix.mult(bias_matrix, light_proj_matrix, light_view_matrix);
}
private void drawFromLight(GL4bc gl, int[] obj, double[][] model_matrix)
{
gl.glUniformMatrix4fv(light_mvp_pointer, 1, false, Matrix.toArrayF(Matrix.mult(light_proj_matrix, light_view_matrix, model_matrix)), 0);
BufferControllerDep.renderTrianglesVAO(gl, obj[0], obj[1], obj[2]);
}
private void drawFromCamera(GL4bc gl, int[] obj, double[][] model_matrix)
{
gl.glUniformMatrix4fv(camera_shadow_pointer, 1, false, Matrix.toArrayF(Matrix.mult(shadow_matrix, model_matrix)), 0);
gl.glUniformMatrix4fv(camera_proj_pointer, 1, false, Matrix.toArrayF(camera_proj_matrix), 0);
gl.glUniformMatrix4fv(camera_mv_pointer, 1, false, Matrix.toArrayF(Matrix.mult(camera_view_matrix, model_matrix)), 0);
BufferControllerDep.renderTrianglesVAO(gl, obj[0], obj[1], obj[2]);
}
@Override
public void reshape(GLAutoDrawable glAutoDrawable, int x, int y, int width, int height)
{
screen_width = width;
screen_height = height;
aspect = (1.0f * screen_width) / screen_height;
}
}
The light-pass shaders are very basic. The camera vertex shader is as follows:
#version 430 core
uniform mat4 mv_matrix;
uniform mat4 proj_matrix;
uniform mat4 shadow_matrix;
layout (location = 0) in vec4 position;
layout (location = 1) in vec3 normal;
out vec4 shadow_coord;
uniform vec3 light_pos = vec3(10.0, 10.0, 10.0);
out vec3 N;
out vec3 L;
void main(void)
{
N = normalize(mat3(mv_matrix) * normal);
L = normalize(light_pos - (mv_matrix * position).xyz);
shadow_coord = shadow_matrix * position;
gl_Position = proj_matrix * mv_matrix * position;
}
And the camera fragment shader:
#version 430 core
layout (location = 0) out vec4 color;
layout (binding = 0) uniform sampler2DShadow shadow_tex;
in vec4 shadow_coord;
uniform vec3 D = vec3(0.9, 0.8, 1.0);
in vec3 N;
in vec3 L;
void main(void)
{
vec3 diffuse = dot(N, L) * D;
float shadow = textureProj(shadow_tex, shadow_coord);
// color = vec4(diffuse, 1.0);
color = shadow * vec4(1);
}
Just rendering the diffuse term works fine; the projection, lookAt, and transformation matrices seem to work.
However, when I try to render shadows, it fails.
Moving the light closer to the object produces a similar artifact.
I use my own Matrix and Vector classes and can provide the source code if needed.
Any insight is appreciated. Thank you!
The problem was caused by the reversed matrix multiplication order in the Java code. Here is the corrected code:
private void drawFromLight(GL4bc gl, int[] obj, double[][] model_matrix)
{
gl.glUniformMatrix4fv(light_mvp_pointer, 1, false,
Matrix.toArrayF(Matrix.mult(model_matrix, light_view_matrix, light_proj_matrix)), 0);
BufferControllerDep.renderTrianglesVAO(gl, obj[0], obj[1], obj[2]);
}
private void drawFromCamera(GL4bc gl, int[] obj, double[][] model_matrix)
{
gl.glUniformMatrix4fv(camera_shadow_pointer, 1, false,
Matrix.toArrayF(Matrix.mult(model_matrix, shadow_matrix)), 0);
gl.glUniformMatrix4fv(camera_mv_pointer, 1, false,
Matrix.toArrayF(Matrix.mult(model_matrix, camera_view_matrix)), 0);
gl.glUniformMatrix4fv(camera_mvp_pointer, 1, false,
Matrix.toArrayF(Matrix.mult(model_matrix, camera_view_matrix, camera_proj_matrix)), 0);
BufferControllerDep.renderTrianglesVAO(gl, obj[0], obj[1], obj[2]);
}
Using the same shaders, this produces the following result.
Also, adding
gl.glEnable(gl.GL_POLYGON_OFFSET_FILL);
gl.glPolygonOffset(2.0f, 0.0f);
before performing the "light pass" gets rid of the distortion.
Mixing shadows and diffuse then produces the combined result.
There is still room for improvement, however...
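In context, those two calls go at the start of the depth pass in display(); a sketch based on the display() method above (disabling the offset again afterwards is my addition):
// depth pass
gl.glEnable(gl.GL_POLYGON_OFFSET_FILL);
gl.glPolygonOffset(2.0f, 0.0f);
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, fbo);
gl.glViewport(0, 0, 1024, 1024);
gl.glUseProgram(light_program);
drawFromLight(gl, obj, obj_model_matrix);
drawFromLight(gl, bkg, bkg_model_matrix);
gl.glDisable(gl.GL_POLYGON_OFFSET_FILL);
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0);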

Rendering fails with no GLErrors Android

I have been running into the same issue for two days now. I am trying to render the usual 2D triangle (in 3D space) in OpenGL ES 2.0 on Android (minApi 15, target 23). I have rewritten the entire code twice and always get the same result: no OpenGL errors or warnings, no Java exceptions, and the screen is just black.
Anyway, here's the code for the renderer class; the activity just creates the renderer and calls start() in onCreate().
import static android.opengl.GLES20.*;
public class renderer implements GLSurfaceView.Renderer {
private Activity a;
private GLSurfaceView v;
private int buffs[];
private int prg;
private static boolean checkGLError(){
int i = glGetError();
if(i != GL_NO_ERROR){
Log.e("GLError", "Error: " + i);
return true;
}
return false;
}
private String VShaderSource =
"precision highp float;\n" +
"\n" +
"attribute mat4 mVP;\n" +
"\n" +
"attribute vec3 vPos;\n" +
"attribute vec3 col;\n" +
"\n" +
"varying vec3 oCol;\n" +
"\n" +
"void main(){\n" +
" gl_Position = mVP * vec4(vPos, 1.0);\n" +
" oCol = col;\n" +
"}";
private String FShaderSource =
"precision mediump float;\n" +
"\n" +
"varying vec3 oCol;\n" +
"\n" +
"void main(){\n" +
" gl_FragColor = vec4(oCol, 255);\n" +
"}";
public renderer(Activity a){
this.a = a;
}
public void start(){
v = new GLSurfaceView(this.a);
v.setEGLContextClientVersion(2);
v.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
v.setRenderer(this);
v.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
a.setContentView(v);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
Log.w("OGL Renderer", "OnSUfraceCreated Called!");
int vs = glCreateShader(GL_VERTEX_SHADER);
int fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(vs, VShaderSource);
glCompileShader(vs);
int var[] = new int[1];
glGetShaderiv(vs, GL_COMPILE_STATUS, var, 0);
if(var[0] != GL_TRUE){
Log.e("OGL Shader", "VShader error: " + glGetShaderInfoLog(vs));
}else{
Log.w("OGL Shader", "VShader compiled successfully");
}
glShaderSource(fs, FShaderSource);
glCompileShader(fs);
glGetShaderiv(fs, GL_COMPILE_STATUS, var, 0);
if(var[0] != GL_TRUE){
Log.e("OGL Shader", "FShader error: " + glGetShaderInfoLog(fs));
}else{
Log.w("OGL Shader", "FShader compiled successfully");
}
prg = glCreateProgram();
glAttachShader(prg, vs);
glAttachShader(prg, fs);
glLinkProgram(prg);
glGetProgramiv(prg, GL_LINK_STATUS, var, 0);
if(var[0] != GL_TRUE){
Log.e("OGL Shader", "Linker error: " + glGetProgramInfoLog(prg));
}else{
Log.w("OGL Shader", "Program compiled successfully");
}
glUseProgram(prg);
if(checkGLError()){
Log.e("Error", "GLError during Program initialization");
}
float[] vertices = {-100, -100, -1,
100, -100, -1,
0, 100, -1};
ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length * 4);
vbb.order(ByteOrder.nativeOrder());
FloatBuffer vFb = vbb.asFloatBuffer();
vFb.put(vertices);
vFb.position(0);
float[] colors = {1, 0, 0,
0, 1, 0,
0, 0, 1};
ByteBuffer cbb = ByteBuffer.allocateDirect(colors.length * 4);
cbb.order(ByteOrder.nativeOrder());
FloatBuffer cFb = cbb.asFloatBuffer();
cFb.put(colors);
cFb.position(0);
buffs = new int[2];
glGenBuffers(2, buffs, 0);
if(checkGLError()){
Log.e("Error", "GLError during Buffer generation");
}
glBindBuffer(GL_ARRAY_BUFFER, buffs[0]);
glBufferData(GL_ARRAY_BUFFER, vFb.capacity(), vFb, GL_STATIC_DRAW);
if(checkGLError()){
Log.e("Error", "GLError during GLBufferData 1");
}
glBindBuffer(GL_ARRAY_BUFFER, buffs[1]);
glBufferData(GL_ARRAY_BUFFER, cFb.capacity(), cFb, GL_STATIC_DRAW);
if(checkGLError()){
Log.e("Error", "GLError during GLBufferData 2");
}
int mPos = glGetUniformLocation(prg, "mVP");
float[] projection = new float[16], view = new float[16], mVP = new float[16];
Matrix.setIdentityM(projection, 0);
Matrix.setLookAtM(view, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0);
Matrix.perspectiveM(projection, 0, 90.0f, 16.0f/9.0f, 0.000001f, 1000.0f);
Matrix.multiplyMM(mVP, 0, projection, 0, view, 0);
glUniformMatrix4fv(mPos, 1, false, mVP, 0);
if(checkGLError()){
Log.e("Error", "GLError during matrix load");
}
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
glViewport(0, 0, width, height);
}
@Override
public void onDrawFrame(GL10 gl) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
int vPos, cPos;
vPos = glGetAttribLocation(prg, "vPos");
if(checkGLError()){
Log.e("Error", "GLError during GetAttribLocation 1");
}
glEnableVertexAttribArray(vPos);
cPos = glGetAttribLocation(prg, "col");
if(checkGLError()){
Log.e("Error", "GLError during GetAttribLocation 2");
}
glEnableVertexAttribArray(cPos);
glBindBuffer(GL_ARRAY_BUFFER, buffs[0]);
glVertexAttribPointer(vPos, 3, GL_FLOAT, false, 0, 0);
if(checkGLError()){
Log.e("Error", "GLError during VertexAttribPointer 1");
}
glBindBuffer(GL_ARRAY_BUFFER, buffs[1]);
glVertexAttribPointer(cPos, 3, GL_FLOAT, false, 0, 0);
if(checkGLError()){
Log.e("Error", "GLError during VertexAttribPointer 2");
}
glDrawArrays(GL_TRIANGLES, 0, 1);
if(checkGLError()){
Log.e("Error", "GLError during DrawArrays");
}
}
}
Any help is appreciated.
-John
EDIT: I copied the wrong vertex Z positions; fixing them did not change anything though.
EDIT 2: Added the glEnableVertexAttribArray() calls, still not working.
I think I found the issue: I was creating a very large triangle. Using smaller coordinates (1.0f ~ 10.0f) solved the problem. I also moved the view matrix a bit further away.
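For illustration, a minimal sketch of that fix against the setup in onSurfaceCreated() above (the exact values are my guess, not the poster's):
float[] vertices = {-1.0f, -1.0f, -1,   //a small triangle instead of +/-100 units
                     1.0f, -1.0f, -1,
                     0.0f,  1.0f, -1};
//and a view matrix placed a little further back from the origin:
Matrix.setLookAtM(view, 0, 0, 0, 5, 0, 0, 0, 0, 1, 0);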

Animating Multiple sprites in Android OpenGL ES 2.0

I've spent days searching and trying tutorials without actually getting results, so here I am.
I'm trying, simply put, to animate a collection of objects (in Android Studio) on the screen in 2D, each with its own independent movement and rotation. However, when I try this, either the object is not rendered at all, or it renders skewed (as if rotated about the vertical Y axis).
I know the importance of the order in which objects are drawn (to give the correct Z-ordering appearance); however, I'm at a bit of a loss with the matrix manipulation.
Here is what I have so far:
Main Activity - standard stuff
private GLSurfaceView mGLSurfaceView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mGLSurfaceView = new GLSurfaceView(this);
//check if device supports ES 2.0
final ActivityManager activityManager = (ActivityManager) getSystemService(Context.ACTIVITY_SERVICE);
final ConfigurationInfo configurationInfo = activityManager.getDeviceConfigurationInfo();
final boolean supportsEs2 = configurationInfo.reqGlEsVersion >= 0x20000;
if (supportsEs2) {
//Get the ES2 compatible context
mGLSurfaceView.setEGLContextClientVersion(2);
//set renderer to my renderer below
mGLSurfaceView.setRenderer(new MyGL20Renderer(this));
} else {
//no support
return;
}
//setContentView(R.layout.activity_main);
setContentView(mGLSurfaceView);
}
GL20Renderer class - Notice I'm now just manually adding 2 objects to my collection to render
public class MyGL20Renderer implements GLSurfaceView.Renderer
{
private final Context mActivityContext;
//Matrix Initializations
private final float[] mMVPMatrix = new float[16];
private final float[] mProjMatrix = new float[16];
private final float[] mVMatrix = new float[16];
private float[] mRotationMatrix = new float[16];
private final float[] mRotateMatrix = new float[16];
private final float[] mMoveMatrix = new float[16];
private final float[] mTempMatrix = new float[16];
private final float[] mModelMatrix = new float[16];
private int numObjects = 2;
private ArrayList<Sprite> spriteList = new ArrayList<Sprite>();
//Declare as volatile because we are updating it from another thread
public volatile float mAngle;
//private Triangle triangle;
//private Sprite sprite;
public MyGL20Renderer(final Context activityContext)
{
mActivityContext = activityContext;
}
public void onSurfaceCreated(GL10 unused, EGLConfig config)
{
//Set the background frame color
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
//Set the camera position (View Matrix) //mtx, offset, eyex,y,z, centrex,y,z, upx,y,z
Matrix.setLookAtM(mVMatrix, 0,
0, 0, -1.5f, //Eye XYZ - position eye behind the origin
0f, 0f, -5.0f, //Look XYZ - We are looking toward the distance
0f, 1.0f, 0.0f); //Up XYZ - Up vector - where head would be pointing if holding the camera
//Initialize Shapes
//triangle = new Triangle();
//sprite = new Sprite(mActivityContext);
//Sprite newSprite;
float xMax = 2.0f;
float yMax = 2.0f;
//rand = 0->1
float newX = (new Random().nextFloat() * xMax * 2) - xMax; //2.0f; //-2 -> +2
float newY = (new Random().nextFloat() * yMax * 2) - yMax; //-3 -> +3
float newZ = 0f;
//for (int i=0; i<numObjects; i++) {
//newSprite = new Sprite(mActivityContext);
//spriteList.add(new Sprite(mActivityContext, newX, newY, newZ));
//}
spriteList.add(new Sprite(mActivityContext, -0.0f, -0.0f, 0.0f));
spriteList.add(new Sprite(mActivityContext, +0.5f, -0.5f, 0.0f));
//spriteList.add(new Sprite(mActivityContext, -1.0f, +1.0f, 0.0f));
//spriteList.add(new Sprite(mActivityContext, +1.0f, +1.0f, 0.0f));
}
public void onDrawFrame(GL10 unused)
{
//init
Sprite currSprite;
//Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
//timing
float jFactor = 0.1f;
long time = SystemClock.uptimeMillis() % 10000L;
float angleInDegrees = (360.0f / 1000.0f) * ((int) time) * jFactor;
/*
//number 1
//Matrix.setIdentityM(mModelMatrix, 0);
//currSprite = spriteList.get(0);
//Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
//currSprite.Draw(mModelMatrix);
//number 2
Matrix.setIdentityM(mModelMatrix, 0);
currSprite = spriteList.get(1);
Matrix.translateM(mModelMatrix, 0, 0.0f, -0.1f, 0.0f);
//Matrix.rotateM(mModelMatrix, 0, 90.0f, 1.0f, 0.0f, 0.0f);
Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
currSprite.Draw(mModelMatrix);
//Matrix.translateM(mModelMatrix, 0, 0, 0, 4.0f);
*/
//Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
//zoom out a bit?
Matrix.translateM(mMVPMatrix, 0, 0, 0, 4.0f);
//number 1
//currSprite = spriteList.get(0);
//Matrix.setIdentityM(mMVPMatrix, 0);
//Matrix.rotateM(mMVPMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
//Matrix.translateM(mMVPMatrix, 0, currSprite.coordX, 0.0f, 0.0f);
//currSprite.coordX += 0.01f;
//currSprite.Draw(mMVPMatrix);
//number 2
currSprite = spriteList.get(0);
Matrix.setIdentityM(mMVPMatrix, 0);
Matrix.translateM(mMVPMatrix, 0, 0.0f, 0.0f, 0.0f);
Matrix.rotateM(mMVPMatrix, 0, angleInDegrees, 0.0f, 0.0f, +1.0f);
//float[] mTempMatrix = new float[16];
//mTempMatrix = mModelMatrix.clone();
//Matrix.multiplyMM(mMVPMatrix, 0, mMVPMatrix, 0, mRotateMatrix, 0);
//mTempMatrix = mMVPMatrix.clone();
//Matrix.multiplyMM(mMVPMatrix, 0, mTempMatrix, 0, mModelMatrix, 0);
//Matrix.setIdentityM(mMVPMatrix, 0);
currSprite.Draw(mMVPMatrix);
/*
//Set the camera position (View Matrix) //mtx, offset, eyex,y,z, centrex,y,z, upx,y,z
Matrix.setLookAtM(mVMatrix, 0,
0, 0, -10,
0f, 0f, 0f,
0f, 1.0f, 0.0f);
//Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
//zoom out a bit?
Matrix.translateM(mMVPMatrix, 0, 0, 0, 4.0f);
for (int i=0; i<numObjects; i++) {
//Create a rotation transformation for the triangle
//Matrix.setRotateM(mRotationMatrix, 0, mAngle, 0, 0, -1.0f);
Matrix.setRotateM(mRotationMatrix, 0, 0, 0, 0, -1.0f); //-1.0 = Z, for some reason need this. Grr
//Combine the rotation matrix with the projection and camera view
Matrix.multiplyMM(mMVPMatrix, 0, mRotationMatrix, 0, mMVPMatrix, 0);
//Draw Shape
//triangle.Draw(mMVPMatrix);
//sprite.Draw(mMVPMatrix);
currSprite = spriteList.get(i);
//Move the object to the passed initial coordinates?
//Matrix.translateM(mMVPMatrix, 0, currSprite.coordX, currSprite.coordY, currSprite.coordZ);
currSprite.Draw(mMVPMatrix);
}
*/
}
public void onSurfaceChanged(GL10 unused, int width, int height)
{
GLES20.glViewport(0, 0, width, height);
if (height == 0) {
height = 1; //incase of div 0 errors
}
float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 10.f;
//This Projection Matrix is applied to object coordinates in the onDrawFrame() method
//Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
Matrix.frustumM(mProjMatrix, 0, left, right, bottom, top, near, far);
}
public static int loadShader(int type, String shaderCode)
{
//Create a Vertex Shader Type Or a Fragment Shader Type (GLES20.GL_VERTEX_SHADER OR GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
//Add The Source Code and Compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
Please excuse the commented code in OnDrawFrame() where I've been experimenting, and failing.
Sprite Class
public class Sprite
{
//Reference to Activity Context
private final Context mActivityContext;
//Added for Textures
private final FloatBuffer mCubeTextureCoordinates;
private int mTextureUniformHandle;
private int mTextureCoordinateHandle;
private final int mTextureCoordinateDataSize = 2;
private int mTextureDataHandle;
private final String vertexShaderCode =
//Test
"attribute vec2 a_TexCoordinate;" +
"varying vec2 v_TexCoordinate;" +
//End Test
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
" gl_Position = vPosition * uMVPMatrix;" +
//Test
"v_TexCoordinate = a_TexCoordinate;" +
//End Test
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
//Test
"uniform sampler2D u_Texture;" +
"varying vec2 v_TexCoordinate;" +
//End Test
"void main() {" +
//"gl_FragColor = vColor;" +
"gl_FragColor = (vColor * texture2D(u_Texture, v_TexCoordinate));" +
"}";
private final int shaderProgram;
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
public float coordX;
public float coordY;
//public float coordZ;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 2;
static float spriteCoords[] = { -0.5f, 0.5f, // top left
-0.5f, -0.5f, // bottom left
0.5f, -0.5f, // bottom right
0.5f, 0.5f }; //top right
private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; //Order to draw vertices
private final int vertexStride = COORDS_PER_VERTEX * 4; //Bytes per vertex
// Set color with red, green, blue and alpha (opacity) values
//float color[] = { 0.63671875f, 0.76953125f, 0.22265625f, 1.0f };
float color[] = { 1f, 1f, 1f, 1.0f };
public Sprite(final Context activityContext, float initX, float initY, float initZ)
{
mActivityContext = activityContext;
this.coordX = initX;
this.coordY = initY;
//this.coordZ = initZ;
//ergh - will do manually for now. Paxo n00b
//just a 2D array, no need for Z nonsense
for (int i=0; i<spriteCoords.length; i++) {
spriteCoords[i] -= (i%2==0) ? coordX : coordY; //- works better than +
}
//float newPosMatrix[] = { initX, initY, 0f };
//adjust the vector coords accordingly
//Matrix.multiplyMV(spriteCoords, 0, newPosMatrix, 0, spriteCoords, 0);
//Initialize Vertex Byte Buffer for Shape Coordinates / # of coordinate values * 4 bytes per float
ByteBuffer bb = ByteBuffer.allocateDirect(spriteCoords.length * 4);
//Use the Device's Native Byte Order
bb.order(ByteOrder.nativeOrder());
//Create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
//Add the coordinates to the FloatBuffer
vertexBuffer.put(spriteCoords);
//Set the Buffer to Read the first coordinate
vertexBuffer.position(0);
// S, T (or X, Y)
// Texture coordinate data.
// Because images have a Y axis pointing downward (values increase as you move down the image) while
// OpenGL has a Y axis pointing upward, we adjust for that here by flipping the Y axis.
// What's more is that the texture coordinates are the same for every face.
final float[] cubeTextureCoordinateData =
{
//Front face
/*0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
1.0f, 0.0f*/
/*-0.5f, 0.5f,
-0.5f, -0.5f,
0.5f, -0.5f,
0.5f, 0.5f*/
0f, 1f,
0f, 0f,
1f, 0f,
1f, 1f
};
mCubeTextureCoordinates = ByteBuffer.allocateDirect(cubeTextureCoordinateData.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
mCubeTextureCoordinates.put(cubeTextureCoordinateData).position(0);
//Initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(spriteCoords.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
int vertexShader = MyGL20Renderer.loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = MyGL20Renderer.loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
shaderProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(shaderProgram, vertexShader);
GLES20.glAttachShader(shaderProgram, fragmentShader);
//Texture Code
GLES20.glBindAttribLocation(shaderProgram, 0, "a_TexCoordinate");
GLES20.glLinkProgram(shaderProgram);
//Load the texture
mTextureDataHandle = loadTexture(mActivityContext, R.drawable.cube);
}
public void Draw(float[] mvpMatrix)
{
//Add program to OpenGL ES Environment
GLES20.glUseProgram(shaderProgram);
//Get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(shaderProgram, "vPosition");
//Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
//Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
//Get Handle to Fragment Shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(shaderProgram, "vColor");
//Set the Color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
//Set Texture Handles and bind Texture
mTextureUniformHandle = GLES20.glGetAttribLocation(shaderProgram, "u_Texture");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(shaderProgram, "a_TexCoordinate");
//Set the active texture unit to texture unit 0.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
//Bind the texture to this unit.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
//Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(mTextureUniformHandle, 0);
//Pass in the texture coordinate information
mCubeTextureCoordinates.position(0);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false, 0, mCubeTextureCoordinates);
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
//Get Handle to Shape's Transformation Matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(shaderProgram, "uMVPMatrix");
//Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
//glTranslatef(0f, 0f, 0f);
//Draw the triangle
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
//Disable Vertex Array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
public static int loadTexture(final Context context, final int resourceId)
{
final int[] textureHandle = new int[1];
GLES20.glGenTextures(1, textureHandle, 0);
if (textureHandle[0] != 0)
{
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false; // No pre-scaling
// Read in the resource
final Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
// Bind to the texture in OpenGL
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
// Set filtering
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
// Load the bitmap into the bound texture.
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
// Recycle the bitmap, since its data has been loaded into OpenGL.
bitmap.recycle();
}
if (textureHandle[0] == 0)
{
throw new RuntimeException("Error loading texture.");
}
return textureHandle[0];
}
}
Now, I don't know if I'm going about this the right way at all, but I simply want to just animate the collection of Sprite objects in spriteList.
More specifically, have a collection of 3 objects and then respond to screen touch, and animate the objects to that location (but that will come later)
Initially, I just want to be able to correctly render these objects (with initial locations) and then rotate them on the centre point (about the Z axis).
For some reason, translateM is warping the texture (as if rotating about the Y axis) rather than actually moving the object along the X/Y plane.
Many thanks for any help you can offer. As you can see I'm fairly new to OpenGL and have had little luck with the limited tutorials out there that support Android Studio and GLES2.0.
Kind regards,
James
I think the problem is that you have not multiplied the translation matrices into your rotation matrices. A matrix multiply is required to combine those.
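For example, a minimal sketch of building a per-sprite model matrix and combining it with the view and projection matrices (the field names follow the renderer above; the temporary arrays and the ordering are my suggestion, and it assumes the usual gl_Position = uMVPMatrix * vPosition convention in the vertex shader):
float[] modelMatrix = new float[16];
float[] viewProj = new float[16];
float[] mvpMatrix = new float[16];
Matrix.setIdentityM(modelMatrix, 0);
Matrix.translateM(modelMatrix, 0, currSprite.coordX, currSprite.coordY, 0f);  //move the sprite
Matrix.rotateM(modelMatrix, 0, angleInDegrees, 0f, 0f, 1f);                   //then spin it about Z
Matrix.multiplyMM(viewProj, 0, mProjMatrix, 0, mVMatrix, 0);                  //projection * view
Matrix.multiplyMM(mvpMatrix, 0, viewProj, 0, modelMatrix, 0);                 //(projection * view) * model
currSprite.Draw(mvpMatrix);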

Creating texture atlas error (opengl ES 2.0, android)

I am trying to create a texture atlas with OpenGL ES 2.0. I want to pack many small images into one big image. It works fine in the emulator, but it doesn't work on the device.
Here is my code:
gl.glGenTextures(1, textureID, 0);
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureID);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bmp, 0);
Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), R.drawable.block);
ByteBuffer pixels = ByteBuffer.allocate(bitmap.getWidth() * bitmap.getHeight() * 4);
bitmap.copyPixelsToBuffer(pixels);
gl.glTexSubImage2D(GL10.GL_TEXTURE_2D, 0, 0, 0, bitmap.getWidth(), bitmap.getHeight(), GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, pixels);
The first Bitmap (bmp) is a 256x256 red image, the second Bitmap (bitmap) is a 16x16 white image. In the emulator I see a red rectangle with a little white image in it, but on the device I can only see the big red rectangle.
You are using power-of-two textures, which is great, as most OpenGL ES 2.0 implementations do not support NPOT textures. Now, because you were able to see the textures in the emulator, it seems that you are not transforming the object (to be rendered) using the projection (MVP) matrix. This example shows how to do that:
public class GLES20Renderer implements Renderer {
private int _planeProgram;
private int _planeAPositionLocation;
private int _planeACoordinateLocation;
private int _planeUMVPLocation;
private int _planeUSamplerLocation;
private FloatBuffer _planeVFB;
private FloatBuffer _planeTFB;
private ShortBuffer _planeISB;
private float[] _ViewMatrix = new float[16];
private float[] _ProjectionMatrix = new float[16];
private float[] _MVPMatrix = new float[16];
private int _textureId;
private static float _zAngle; //used by the static setZAngle/getZAngle helpers below
public Context _context;
public GLES20Renderer(Context context) {
_context = context;
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
initplane();
float ratio = (float) width / height;
float zNear = 0.1f;
float zFar = 1000;
float fov = 0.95f; // 0.2 to 1.0
float size = (float) (zNear * Math.tan(fov / 2));
Matrix.setLookAtM(_ViewMatrix, 0, 0, 0, 50, 0, 0, 0, 0, 1, 0);
Matrix.frustumM(_ProjectionMatrix, 0, -size, size, -size / ratio, size / ratio, zNear, zFar);
_planeProgram = loadProgram(_planeVertexShaderCode, _planeFragmentShaderCode);
_planeAPositionLocation = GLES20.glGetAttribLocation(_planeProgram, "aPosition");
_planeACoordinateLocation = GLES20.glGetAttribLocation(_planeProgram, "aCoord");
_planeUMVPLocation = GLES20.glGetUniformLocation(_planeProgram, "uMVP");
_planeUSamplerLocation = GLES20.glGetUniformLocation(_planeProgram, "uSampler");
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
_textureId = textures[0];
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _textureId);
InputStream is1 = _context.getResources().openRawResource(R.drawable.brick);
Bitmap img1;
try {
img1 = BitmapFactory.decodeStream(is1);
} finally {
try {
is1.close();
} catch(IOException e) {
//e.printStackTrace();
}
}
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); // GL_LINEAR
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img1, 0);
}
public void onDrawFrame(GL10 gl) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
Matrix.multiplyMM(_MVPMatrix, 0, _ProjectionMatrix, 0, _ViewMatrix, 0);
GLES20.glUseProgram(_planeProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _textureId);
GLES20.glUniform1i(_planeUSamplerLocation, 0);
GLES20.glUniformMatrix4fv(_planeUMVPLocation, 1, false, _MVPMatrix, 0);
GLES20.glVertexAttribPointer(_planeAPositionLocation, 3, GLES20.GL_FLOAT, false, 12, _planeVFB);
GLES20.glEnableVertexAttribArray(_planeAPositionLocation);
GLES20.glVertexAttribPointer(_planeACoordinateLocation, 2, GLES20.GL_FLOAT, false, 8, _planeTFB);
GLES20.glEnableVertexAttribArray(_planeACoordinateLocation);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_SHORT, _planeISB);
System.gc();
}
public static void setZAngle(float angle) {
GLES20Renderer._zAngle = angle;
}
public static float getZAngle() {
return GLES20Renderer._zAngle;
}
private void initplane() {
float[] planeVFA = {
10.000000f,-10.000000f,0.000000f,
-10.000000f,-10.000000f,0.000000f,
10.000000f,10.000000f,0.000000f,
-10.000000f,10.000000f,0.000000f,
};
float[] planeTFA = {
// 1,0, 0,0, 1,1, 0,1
1,1, 0,1, 1,0, 0,0
};
short[] planeISA = {
2,3,1,
0,2,1,
};
ByteBuffer planeVBB = ByteBuffer.allocateDirect(planeVFA.length * 4);
planeVBB.order(ByteOrder.nativeOrder());
_planeVFB = planeVBB.asFloatBuffer();
_planeVFB.put(planeVFA);
_planeVFB.position(0);
ByteBuffer planeTBB = ByteBuffer.allocateDirect(planeTFA.length * 4);
planeTBB.order(ByteOrder.nativeOrder());
_planeTFB = planeTBB.asFloatBuffer();
_planeTFB.put(planeTFA);
_planeTFB.position(0);
ByteBuffer planeIBB = ByteBuffer.allocateDirect(planeISA.length * 2);
planeIBB.order(ByteOrder.nativeOrder());
_planeISB = planeIBB.asShortBuffer();
_planeISB.put(planeISA);
_planeISB.position(0);
}
private int loadShader(int type, String source) {
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
return shader;
}
private int loadProgram(String vertexShaderCode, String fragmentShaderCode) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
int program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
GLES20.glLinkProgram(program);
return program;
}
private final String _planeVertexShaderCode =
"attribute vec4 aPosition; \n"
+ "attribute vec2 aCoord; \n"
+ "varying vec2 vCoord; \n"
+ "uniform mat4 uMVP; \n"
+ "void main() { \n"
+ " gl_Position = uMVP * aPosition; \n"
+ " vCoord = aCoord; \n"
+ "} \n";
private final String _planeFragmentShaderCode =
"#ifdef GL_FRAGMENT_PRECISION_HIGH \n"
+ "precision highp float; \n"
+ "#else \n"
+ "precision mediump float; \n"
+ "#endif \n"
+ "varying vec2 vCoord; \n"
+ "uniform sampler2D uSampler; \n"
+ "void main() { \n"
+ " gl_FragColor = texture2D(uSampler,vCoord); \n"
+ "} \n";
}
More of these at - http://www.apress.com/9781430250531
If your code is really OpenGL ES 2.0, you should not be using the GL10 wrapper class. I think ES 1.0 did not support non-power-of-two texture sizes without extensions, but I guess you have handled that. You may have found a bug in the OpenGL ES drivers for your device, since it works on the AVD emulator. Try a different device with a different type of GPU. This article will help:
http://software.intel.com/en-us/articles/porting-opengl-games-to-android-on-intel-atom-processors-part-1
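For reference, a minimal sketch of the same atlas upload using the GLES20/GLUtils calls directly instead of the GL10 wrapper (bmp and bitmap are the two Bitmaps from the question; everything else here is an assumption):
int[] textureID = new int[1];
GLES20.glGenTextures(1, textureID, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureID[0]);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bmp, 0);            //upload the 256x256 atlas
GLUtils.texSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, bitmap);   //patch the 16x16 image into it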

GL11 Texture rendering wrong

I'm trying to render an image on a basic quad. I took a look at the Space Invaders example game for the code and implemented that code into mine. The image gets rendered on screen with the right colors, but it appears shifted. This is the image I'm trying to render:
http://img203.imageshack.us/img203/5264/testwq.png
This is how it renders:
http://img593.imageshack.us/img593/6849/test2uh.png
The image is 128x128, and so is the quad.
Here is my code:
public class RenderEngine
{
private IntBuffer intbuf = BufferUtils.createIntBuffer(1);
private ColorModel glAlphaColorModel;
private ColorModel glColorModel;
public RenderEngine()
{
this.glAlphaColorModel = new ComponentColorModel(ColorSpace.getInstance(ColorSpace.CS_sRGB), new int[] { 8, 8, 8, 8 }, true, false, Transparency.TRANSLUCENT, DataBuffer.TYPE_BYTE);
this.glColorModel = new ComponentColorModel(ColorSpace.getInstance(ColorSpace.CS_sRGB), new int[] { 8, 8, 8, 0 }, false, false, Transparency.OPAQUE, DataBuffer.TYPE_BYTE);
}
public void bindTexture(String filename)
{
try
{
File file = new File(CivilPolitica.instance.getDir(), "resources/" + filename);
FileInputStream fis = new FileInputStream(file);
GL11.glBindTexture(GL11.GL_TEXTURE_2D, this.getTexture(fis));
fis.close();
}
catch (Exception e)
{
e.printStackTrace();
System.exit(0);
}
}
private int getTexture(InputStream in)
{
try
{
GL11.glGenTextures(this.intbuf);
int id = this.intbuf.get(0);
GL11.glBindTexture(GL11.GL_TEXTURE_2D, id);
BufferedImage bi = ImageIO.read(in);
int format = bi.getColorModel().hasAlpha() ? GL11.GL_RGBA : GL11.GL_RGB;
ByteBuffer texData;
WritableRaster raster;
BufferedImage texImage;
int texWidth = 2;
int texHeight = 2;
while (texWidth < bi.getWidth())
{
texWidth *= 2;
}
while (texHeight < bi.getHeight())
{
texHeight *= 2;
}
if (bi.getColorModel().hasAlpha())
{
raster = Raster.createInterleavedRaster(DataBuffer.TYPE_BYTE, texWidth, texHeight, 4, null);
texImage = new BufferedImage(this.glAlphaColorModel, raster, false, new Hashtable<String, Object>());
}
else
{
raster = Raster.createInterleavedRaster(DataBuffer.TYPE_BYTE, texWidth, texHeight, 3, null);
texImage = new BufferedImage(this.glColorModel, raster, false, new Hashtable<String, Object>());
}
Graphics g = texImage.getGraphics();
g.setColor(new Color(0f, 0f, 0f, 0f));
g.fillRect(0, 0, texWidth, texHeight);
g.drawImage(bi, 0, 0, null);
byte[] data = ((DataBufferByte) texImage.getRaster().getDataBuffer()).getData();
texData = ByteBuffer.allocateDirect(data.length);
texData.order(ByteOrder.nativeOrder());
texData.put(data, 0, data.length);
texData.flip();
glTexParameteri(GL11.GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL11.GL_LINEAR);
glTexParameteri(GL11.GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL11.GL_LINEAR);
glTexImage2D(GL11.GL_TEXTURE_2D, 0, GL11.GL_RGBA, bi.getWidth(), bi.getHeight(), 0, format, GL_UNSIGNED_BYTE, texData);
return id;
}
catch (Exception e)
{
e.printStackTrace();
System.exit(0);
return 0;
}
}
}
And the actual quad:
CivilPolitica.instance.renderer.bindTexture("test.png");
GL11.glPushMatrix();
GL11.glTranslatef(128, 128, 0);
GL11.glBegin(GL11.GL_QUADS);
GL11.glTexCoord2f(0, 0);
GL11.glVertex2i(0, 0);
GL11.glTexCoord2f(0, 127);
GL11.glVertex2i(0, 128);
GL11.glTexCoord2f(127, 127);
GL11.glVertex2i(128, 128);
GL11.glTexCoord2f(127, 0);
GL11.glVertex2i(128, 0);
GL11.glEnd();
GL11.glPopMatrix();
GL11.glTexCoord2f(0, 0);
GL11.glVertex2i(0, 0);
GL11.glTexCoord2f(0, 127);
GL11.glVertex2i(0, 128);
GL11.glTexCoord2f(127, 127);
GL11.glVertex2i(128, 128);
GL11.glTexCoord2f(127, 0);
GL11.glVertex2i(128, 0);
must be
GL11.glTexCoord2f(0.0f, 0.0f);
GL11.glVertex2i(0, 0);
GL11.glTexCoord2f(0.0f, 1.0f);
GL11.glVertex2i(0, 128);
GL11.glTexCoord2f(1.0f, 1.0f);
GL11.glVertex2i(128, 128);
GL11.glTexCoord2f(1.0f, 0.0f);
GL11.glVertex2i(128, 0);
because texture coordinates go from 0.0f to 1.0f (0.0f is one edge and 1.0f is the other; that way they are not resolution dependent).
