I have been running into the same issue for 2 days now. I am trying to render the usual 2D triangle (in 3D space) in OpenGL ES 2.0, on Android (minApi 15, target 23). I have rewritten the entire code twice, and I always get the same result. No OpenGL errors or warnings, no Java exceptions, and the screen is just black...
Anyway here's the code for the renderer class, the activity just creates the renderer and calls start() in the onCreate().
import static android.opengl.GLES20.*;
public class renderer implements GLSurfaceView.Renderer {
private Activity a;
private GLSurfaceView v;
private int buffs[];
private int prg;
private static boolean checkGLError(){
int i = glGetError();
if(i != GL_NO_ERROR){
Log.e("GLError", "Error: " + i);
return true;
}
return false;
}
private String VShaderSource =
"precision highp float;\n" +
"\n" +
"attribute mat4 mVP;\n" +
"\n" +
"attribute vec3 vPos;\n" +
"attribute vec3 col;\n" +
"\n" +
"varying vec3 oCol;\n" +
"\n" +
"void main(){\n" +
" gl_Position = mVP * vec4(vPos, 1.0);\n" +
" oCol = col;\n" +
"}";
private String FShaderSource =
"precision mediump float;\n" +
"\n" +
"varying vec3 oCol;\n" +
"\n" +
"void main(){\n" +
" gl_FragColor = vec4(oCol, 255);\n" +
"}";
public renderer(Activity a){
this.a = a;
}
public void start(){
v = new GLSurfaceView(this.a);
v.setEGLContextClientVersion(2);
v.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
v.setRenderer(this);
v.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
a.setContentView(v);
}
#Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
Log.w("OGL Renderer", "OnSUfraceCreated Called!");
int vs = glCreateShader(GL_VERTEX_SHADER);
int fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(vs, VShaderSource);
glCompileShader(vs);
int var[] = new int[1];
glGetShaderiv(vs, GL_COMPILE_STATUS, var, 0);
if(var[0] != GL_TRUE){
Log.e("OGL Shader", "VShader error: " + glGetShaderInfoLog(vs));
}else{
Log.w("OGL Shader", "VShader compiled successfully");
}
glShaderSource(fs, FShaderSource);
glCompileShader(fs);
glGetShaderiv(fs, GL_COMPILE_STATUS, var, 0);
if(var[0] != GL_TRUE){
Log.e("OGL Shader", "FShader error: " + glGetShaderInfoLog(fs));
}else{
Log.w("OGL Shader", "FShader compiled successfully");
}
prg = glCreateProgram();
glAttachShader(prg, vs);
glAttachShader(prg, fs);
glLinkProgram(prg);
glGetProgramiv(prg, GL_LINK_STATUS, var, 0);
if(var[0] != GL_TRUE){
Log.e("OGL Shader", "Linker error: " + glGetProgramInfoLog(prg));
}else{
Log.w("OGL Shader", "Program compiled successfully");
}
glUseProgram(prg);
if(checkGLError()){
Log.e("Error", "GLError during Program initialization");
}
float[] vertices = {-100, -100, -1,
100, -100, -1,
0, 100, -1};
ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length * 4);
vbb.order(ByteOrder.nativeOrder());
FloatBuffer vFb = vbb.asFloatBuffer();
vFb.put(vertices);
vFb.position(0);
float[] colors = {1, 0, 0,
0, 1, 0,
0, 0, 1};
ByteBuffer cbb = ByteBuffer.allocateDirect(colors.length * 4);
cbb.order(ByteOrder.nativeOrder());
FloatBuffer cFb = cbb.asFloatBuffer();
cFb.put(colors);
cFb.position(0);
buffs = new int[2];
glGenBuffers(2, buffs, 0);
if(checkGLError()){
Log.e("Error", "GLError during Buffer generation");
}
glBindBuffer(GL_ARRAY_BUFFER, buffs[0]);
glBufferData(GL_ARRAY_BUFFER, vFb.capacity(), vFb, GL_STATIC_DRAW);
if(checkGLError()){
Log.e("Error", "GLError during GLBufferData 1");
}
glBindBuffer(GL_ARRAY_BUFFER, buffs[1]);
glBufferData(GL_ARRAY_BUFFER, cFb.capacity(), cFb, GL_STATIC_DRAW);
if(checkGLError()){
Log.e("Error", "GLError during GLBufferData 2");
}
int mPos = glGetUniformLocation(prg, "mVP");
float[] projection = new float[16], view = new float[16], mVP = new float[16];
Matrix.setIdentityM(projection, 0);
Matrix.setLookAtM(view, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0);
Matrix.perspectiveM(projection, 0, 90.0f, 16.0f/9.0f, 0.000001f, 1000.0f);
Matrix.multiplyMM(mVP, 0, projection, 0, view, 0);
glUniformMatrix4fv(mPos, 1, false, mVP, 0);
if(checkGLError()){
Log.e("Error", "GLError during matrix load");
}
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);
}
#Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
glViewPort(0, 0, width, height);
}
#Override
public void onDrawFrame(GL10 gl) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
int vPos, cPos;
vPos = glGetAttribLocation(prg, "vPos");
if(checkGLError()){
Log.e("Error", "GLError during GetAttribLocation 1");
}
glEnableVertexAttribArray(vPos);
cPos = glGetAttribLocation(prg, "col");
if(checkGLError()){
Log.e("Error", "GLError during GetAttribLocation 2");
}
glEnableVertexAttribArray(cPos);
glBindBuffer(GL_ARRAY_BUFFER, buffs[0]);
glVertexAttribPointer(vPos, 3, GL_FLOAT, false, 0, 0);
if(checkGLError()){
Log.e("Error", "GLError during VertexAttribPointer 1");
}
glBindBuffer(GL_ARRAY_BUFFER, buffs[1]);
glVertexAttribPointer(cPos, 3, GL_FLOAT, false, 0, 0);
if(checkGLError()){
Log.e("Error", "GLError during VertexAttribPointer 2");
}
glDrawArrays(GL_TRIANGLES, 0, 1);
if(checkGLError()){
Log.e("Error", "GLError during DrawArrays");
}
}
}
Any help is appreciated.
-John
EDIT I copied the wrong vertex Z positions; fixing them did not change anything, though.
EDIT 2 Added the glEnableVertexAttribArray() calls, still not working.
I think I found the issue. I was creating a very large triangle, using smaller points (1.0f ~ 10.0f) solved the issue. Also, moved the view matrix a bit further away.
Related
I am translating OpenGL SuperBible demos into Java using JOGL. All the demos up until chapter 12 went well, but now I am stuck at shadow mapping. Here is my setup:
public class ShadowMap implements GLEventListener
{
public static void main(String[] args)
{
ShadowMap rend = new ShadowMap();
window.NEWTWindowStarter.init(rend, null, null);
}
private int light_program, camera_program;
private int[] obj, bkg;
private int camera_mv_pointer, camera_proj_pointer, light_mvp_pointer, camera_shadow_pointer;
private int fbo, depth_tex;
private int screen_width, screen_height;
private float aspect;
private double[] light_position, camera_position;
private double[][] light_proj_matrix, light_view_matrix;
private double[][] camera_proj_matrix, camera_view_matrix;
private double[][] bias_matrix, shadow_matrix;
private double[][] obj_model_matrix, bkg_model_matrix;
#Override
public void init(GLAutoDrawable glAutoDrawable)
{
GL4 gl = glAutoDrawable.getGL().getGL4bc();
ObjectParser parser = new ObjectParser("pawn_s.obj");
obj = BufferController.prepareVAO(gl, parser.getDataHolder());
parser = new ObjectParser("bkg.obj");
bkg = BufferController.prepareVAO(gl, parser.getDataHolder());
light_program = ShaderController.init(gl, "shaders/demo/d25/light_v.glsl", "shaders/demo/d25/light_f.glsl");
light_mvp_pointer = gl.glGetUniformLocation(light_program, "mvp");
camera_program = ShaderController.init(gl, "shaders/demo/d25/camera_v.glsl", "shaders/demo/d25/camera_f.glsl");
camera_mv_pointer = gl.glGetUniformLocation(camera_program, "mv_matrix");
camera_proj_pointer = gl.glGetUniformLocation(camera_program, "proj_matrix");
camera_shadow_pointer = gl.glGetUniformLocation(camera_program, "shadow_matrix");
depth_tex = BufferController.generateTextureId(gl);
gl.glBindTexture(gl.GL_TEXTURE_2D, depth_tex);
gl.glTexStorage2D(gl.GL_TEXTURE_2D, 11, gl.GL_DEPTH_COMPONENT32F, 1024, 1024);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_LINEAR);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP_TO_BORDER);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP_TO_BORDER);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_COMPARE_MODE, gl.GL_COMPARE_REF_TO_TEXTURE);
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_COMPARE_FUNC, gl.GL_LEQUAL);
gl.glActiveTexture(gl.GL_TEXTURE0);
gl.glBindTexture(gl.GL_TEXTURE_2D, depth_tex);
fbo = BufferController.generateFrameId(gl);
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, fbo);
gl.glFramebufferTexture(gl.GL_FRAMEBUFFER, gl.GL_DEPTH_ATTACHMENT, depth_tex, 0);
gl.glDrawBuffer(gl.GL_NONE);
gl.glReadBuffer(gl.GL_NONE);
if(gl.glCheckFramebufferStatus(gl.GL_FRAMEBUFFER) == gl.GL_FRAMEBUFFER_COMPLETE)
System.out.println("FrameBuffer OK!");
else
System.out.println("FrameBuffer FAIL!");
gl.glBindTexture(gl.GL_TEXTURE_2D, 0);
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0);
gl.glEnable(gl.GL_DEPTH_TEST);
}
#Override
public void dispose(GLAutoDrawable glAutoDrawable){}
#Override
public void display(GLAutoDrawable glAutoDrawable)
{
GL4bc gl = glAutoDrawable.getGL().getGL4bc();
computerMatrices();
gl.glEnable(gl.GL_DEPTH_TEST);
// depth pass
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, fbo);
gl.glViewport(0, 0, 1024, 1024);
gl.glUseProgram(light_program);
drawFromLight(gl, obj, obj_model_matrix);
drawFromLight(gl, bkg, bkg_model_matrix);
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0);
// final pass
gl.glViewport(0, 0, screen_width, screen_height);
gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT);
gl.glBindTexture(gl.GL_TEXTURE_2D, depth_tex);
gl.glDrawBuffer(gl.GL_BACK);
gl.glUseProgram(camera_program);
drawFromCamera(gl, obj, obj_model_matrix);
drawFromCamera(gl, bkg, bkg_model_matrix);
gl.glBindTexture(gl.GL_TEXTURE_2D, 0);
}
private void computerMatrices()
{
bkg_model_matrix = Matrix.model(0, 0, 0, 0, 0, 0, 1);
obj_model_matrix = Matrix.model(0, 0, 0, 0, 0, 0, 1);
light_position = new double[]{10, 10, 10};
camera_position = new double[]{10, 0, 10};
light_proj_matrix = Matrix.frustum(-1.0, 1.0, -1.0, 1.0, 1.0, 200.0);
light_view_matrix = Matrix.lookAt(light_position, new double[]{0, 0, 0}, new double[]{0, 1, 0});
camera_proj_matrix = Matrix.perspective(60, aspect, 0.1f, 1000f);
camera_view_matrix = Matrix.lookAt(camera_position, new double[]{0, 0, 0}, new double[]{0, 1, 0});
bias_matrix = new double[][]{
{0.5, 0.0, 0.0, 0.0},
{0.0, 0.5, 0.0, 0.0},
{0.0, 0.0, 0.5, 0.0},
{0.5, 0.5, 0.5, 1.0}};
shadow_matrix = Matrix.mult(bias_matrix, light_proj_matrix, light_view_matrix);
}
private void drawFromLight(GL4bc gl, int[] obj, double[][] model_matrix)
{
gl.glUniformMatrix4fv(light_mvp_pointer, 1, false, Matrix.toArrayF(Matrix.mult(light_proj_matrix, light_view_matrix, model_matrix)), 0);
BufferControllerDep.renderTrianglesVAO(gl, obj[0], obj[1], obj[2]);
}
private void drawFromCamera(GL4bc gl, int[] obj, double[][] model_matrix)
{
gl.glUniformMatrix4fv(camera_shadow_pointer, 1, false, Matrix.toArrayF(Matrix.mult(shadow_matrix, model_matrix)), 0);
gl.glUniformMatrix4fv(camera_proj_pointer, 1, false, Matrix.toArrayF(camera_proj_matrix), 0);
gl.glUniformMatrix4fv(camera_mv_pointer, 1, false, Matrix.toArrayF(Matrix.mult(camera_view_matrix, model_matrix)), 0);
BufferControllerDep.renderTrianglesVAO(gl, obj[0], obj[1], obj[2]);
}
#Override
public void reshape(GLAutoDrawable glAutoDrawable, int x, int y, int width, int height)
{
screen_width = width;
screen_height = height;
aspect = (1.0f * screen_width) / screen_height;
}
}
Light shaders are very basic. Camera Vertex shader is as follows:
#version 430 core
// Camera-pass vertex shader: computes eye-space lighting vectors and the
// shadow-map lookup coordinate for each vertex.
uniform mat4 mv_matrix;
uniform mat4 proj_matrix;
// Maps model space into shadow-map texture space (bias * light proj * light view).
uniform mat4 shadow_matrix;
layout (location = 0) in vec4 position;
layout (location = 1) in vec3 normal;
out vec4 shadow_coord;
uniform vec3 light_pos = vec3(10.0, 10.0, 10.0);
out vec3 N; // eye-space surface normal
out vec3 L; // eye-space direction toward the light
void main(void)
{
N = normalize(mat3(mv_matrix) * normal);
// NOTE(review): light_pos is used unmodified here while the vertex is moved
// into eye space -- this assumes light_pos is already expressed in eye space;
// confirm against how the uniform is set.
L = normalize(light_pos - (mv_matrix * position).xyz);
shadow_coord = shadow_matrix * position;
gl_Position = proj_matrix * mv_matrix * position;
}
And camera Fragment shader:
#version 430 core
// Camera-pass fragment shader: diffuse shading modulated by a shadow-map test.
layout (location = 0) out vec4 color;
// Depth-comparison sampler; textureProj performs the perspective divide and
// the depth comparison in one step (texture unit 0).
layout (binding = 0) uniform sampler2DShadow shadow_tex;
in vec4 shadow_coord;
uniform vec3 D = vec3(0.9, 0.8, 1.0); // diffuse albedo
in vec3 N;
in vec3 L;
void main(void)
{
// NOTE(review): dot(N, L) is not clamped to 0; back-facing fragments get a
// negative diffuse term.
vec3 diffuse = dot(N, L) * D;
float shadow = textureProj(shadow_tex, shadow_coord);
// Debug path: diffuse only (shadows disabled).
// color = vec4(diffuse, 1.0);
// Shadow-only visualization; combine with `diffuse` for the final image.
color = shadow * vec4(1);
}
Just rendering diffuse works fine. Projection, LookAt, and Transformation matrices seem to work.
However when trying to render shadows, it fails:
Moving light closer to the object produces this:
I use my own Matrix and Vector classes and can provide source code if needed.
Any insight is appreciated. Thank you!
The problem was caused by the reverse matrix multiplication order in Java code. Here is corrected code:
// Depth pass: uploads the light-space MVP and renders the object's VAO.
// This Matrix implementation composes left-to-right (model, then view, then
// projection) -- the reversed order was the original bug.
private void drawFromLight(GL4bc gl, int[] obj, double[][] model_matrix)
{
gl.glUniformMatrix4fv(light_mvp_pointer, 1, false,
Matrix.toArrayF(Matrix.mult(model_matrix, light_view_matrix, light_proj_matrix)), 0);
BufferControllerDep.renderTrianglesVAO(gl, obj[0], obj[1], obj[2]);
}
// Final pass: uploads shadow-lookup, model-view, and MVP matrices, then draws.
// NOTE(review): camera_mvp_pointer is not among the fields declared in the
// original class (which has camera_mv_pointer/camera_proj_pointer) --
// presumably a field and an "mvp" uniform were added alongside this fix; verify.
private void drawFromCamera(GL4bc gl, int[] obj, double[][] model_matrix)
{
gl.glUniformMatrix4fv(camera_shadow_pointer, 1, false,
Matrix.toArrayF(Matrix.mult(model_matrix, shadow_matrix)), 0);
gl.glUniformMatrix4fv(camera_mv_pointer, 1, false,
Matrix.toArrayF(Matrix.mult(model_matrix, camera_view_matrix)), 0);
gl.glUniformMatrix4fv(camera_mvp_pointer, 1, false,
Matrix.toArrayF(Matrix.mult(model_matrix, camera_view_matrix, camera_proj_matrix)), 0);
BufferControllerDep.renderTrianglesVAO(gl, obj[0], obj[1], obj[2]);
}
Using the same shaders this will produce following result:
Also adding
gl.glEnable(gl.GL_POLYGON_OFFSET_FILL);
gl.glPolygonOffset(2.0f, 0.0f);
before performing "light pass" gets rid of distortion:
Mixing shadows and diffuse produces this:
There is still room for improvement however...
I am using LibGDX 0.9.9. I am trying to render cubemap and fog. So my code snippet below:
// Builds the scene: environment (ambient + fog + directional light), a cubemap
// attached to the environment, a perspective camera, and a single Renderable
// taken from the loaded OBJ model, rendered manually via a DefaultShader.
public void show() {
modelBatch = new ModelBatch();
environment = new Environment();
environment.set(new ColorAttribute(ColorAttribute.AmbientLight, 1.0f, 0.4f, 0.4f, 1f));
environment.set(new ColorAttribute(ColorAttribute.Fog, 0.9f, 1f, 0f, 1f));
environment.add(new DirectionalLight().set(0.8f, 0.8f, 0.8f, -1f, -0.8f, -0.2f));
// Six-face cubemap; attaching it to the environment only has an effect if the
// shader's GLSL actually samples u_environmentCubemap -- the stock
// DefaultShader GLSL does not, which is why nothing changes on screen.
cubemap = new Cubemap(Gdx.files.internal("cubemap/pos-x.png"),
Gdx.files.internal("cubemap/neg-x.png"),
Gdx.files.internal("cubemap/pos-y.png"),
Gdx.files.internal("cubemap/neg-y.png"),
Gdx.files.internal("cubemap/pos-z.png"),
Gdx.files.internal("cubemap/neg-z.png"));
environment.set(new CubemapAttribute(CubemapAttribute.EnvironmentMap, cubemap));
cam = new PerspectiveCamera(67, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
cam.position.set(1f, 1f, 1f);
cam.lookAt(0,0,0);
cam.near = 0.1f;
cam.far = 300f;
cam.update();
ModelLoader loader = new ObjLoader();
model = loader.loadModel(Gdx.files.internal("earth/earth.obj"));
instance = new ModelInstance(model);
// Wire the first node part of the model into a standalone Renderable.
// NOTE(review): presumably setRenderable copies the part's mesh/material into
// `renderable` -- confirm against the LibGDX 0.9.9 NodePart API.
NodePart blockPart = model.nodes.get(0).parts.get(0);
renderable = new Renderable();
blockPart.setRenderable(renderable);
renderable.environment = environment;
renderable.worldTransform.idt();
renderContext = new RenderContext(new DefaultTextureBinder(DefaultTextureBinder.WEIGHTED, 1));
// DefaultShader is specialized at construction time from the renderable's
// material/environment attributes.
shader = new DefaultShader(renderable);
shader.init();
camController = new CameraInputController(cam);
Gdx.input.setInputProcessor(camController);
}
#Override
public void render(float delta) {
camController.update();
Gdx.gl.glViewport(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
Gdx.gl.glClearColor(0, 0, 0, 1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
renderContext.begin();
shader.begin(cam, renderContext);
shader.render(renderable);
shader.end();
renderContext.end();
}
But nothing happens. I see object only.
What am I doing wrong?
After spending some time, I implemented cube map in LibGDX. Perhaps, it's not ideal solution, but there is nothing more (At least I couldn't find anything). So, I used native OpenGL ES functions and LibGDX. My class is below:
public class EnvironmentCubemap implements Disposable{
protected final Pixmap[] data = new Pixmap[6];
protected ShaderProgram shader;
protected int u_worldTrans;
protected Mesh quad;
private Matrix4 worldTrans;
private Quaternion q;
protected String vertexShader = " attribute vec3 a_position; \n"+
" attribute vec3 a_normal; \n"+
" attribute vec2 a_texCoord0; \n"+
" uniform mat4 u_worldTrans; \n"+
" varying vec2 v_texCoord0; \n"+
" varying vec3 v_cubeMapUV; \n"+
" void main() { \n"+
" v_texCoord0 = a_texCoord0; \n"+
" vec4 g_position = u_worldTrans * vec4(a_position, 1.0); \n"+
" v_cubeMapUV = normalize(g_position.xyz); \n"+
" gl_Position = vec4(a_position, 1.0); \n"+
" } \n";
protected String fragmentShader = "#ifdef GL_ES \n"+
" precision mediump float; \n"+
" #endif \n"+
" uniform samplerCube u_environmentCubemap; \n"+
" varying vec2 v_texCoord0; \n"+
" varying vec3 v_cubeMapUV; \n"+
" void main() { \n"+
" gl_FragColor = vec4(textureCube(u_environmentCubemap, v_cubeMapUV).rgb, 1.0); \n"+
" } \n";
public String getDefaultVertexShader(){
return vertexShader;
}
public String getDefaultFragmentShader(){
return fragmentShader;
}
public EnvironmentCubemap (Pixmap positiveX, Pixmap negativeX, Pixmap positiveY, Pixmap negativeY, Pixmap positiveZ, Pixmap negativeZ) {
data[0]=positiveX;
data[1]=negativeX;
data[2]=positiveY;
data[3]=negativeY;
data[4]=positiveZ;
data[5]=negativeZ;
init();
}
public EnvironmentCubemap (FileHandle positiveX, FileHandle negativeX, FileHandle positiveY, FileHandle negativeY, FileHandle positiveZ, FileHandle negativeZ) {
this(new Pixmap(positiveX), new Pixmap(negativeX), new Pixmap(positiveY), new Pixmap(negativeY), new Pixmap(positiveZ), new Pixmap(negativeZ));
}
//IF ALL SIX SIDES ARE REPRESENTED IN ONE IMAGE
public EnvironmentCubemap (Pixmap cubemap) {
int w = cubemap.getWidth();
int h = cubemap.getHeight();
for(int i=0; i<6; i++) data[i] = new Pixmap(w/4, h/3, Format.RGB888);
for(int x=0; x<w; x++)
for(int y=0; y<h; y++){
//-X
if(x>=0 && x<=w/4 && y>=h/3 && y<=h*2/3) data[1].drawPixel(x, y-h/3, cubemap.getPixel(x, y));
//+Y
if(x>=w/4 && x<=w/2 && y>=0 && y<=h/3) data[2].drawPixel(x-w/4, y, cubemap.getPixel(x, y));
//+Z
if(x>=w/4 && x<=w/2 && y>=h/3 && y<=h*2/3) data[4].drawPixel(x-w/4, y-h/3, cubemap.getPixel(x, y));
//-Y
if(x>=w/4 && x<=w/2 && y>=h*2/3 && y<=h) data[3].drawPixel(x-w/4, y-h*2/3, cubemap.getPixel(x, y));
//+X
if(x>=w/2 && x<=w*3/4 && y>=h/3 && y<=h*2/3) data[0].drawPixel(x-w/2, y-h/3, cubemap.getPixel(x, y));
//-Z
if(x>=w*3/4 && x<=w && y>=h/3 && y<=h*2/3) data[5].drawPixel(x-w*3/4, y-h/3, cubemap.getPixel(x, y));
}
cubemap.dispose();
cubemap=null;
init();
}
private void init(){
shader = new ShaderProgram(vertexShader, fragmentShader);
if (!shader.isCompiled())
throw new GdxRuntimeException(shader.getLog());
u_worldTrans = shader.getUniformLocation("u_worldTrans");
quad = createQuad();
worldTrans = new Matrix4();
q = new Quaternion();
initCubemap();
}
private void initCubemap(){
//bind cubemap
Gdx.gl20.glBindTexture(GL20.GL_TEXTURE_CUBE_MAP, 0);
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_POSITIVE_X, 0, GL20.GL_RGB, data[0].getWidth(), data[0].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[0].getPixels());
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_NEGATIVE_X, 0, GL20.GL_RGB, data[1].getWidth(), data[1].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[1].getPixels());
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_POSITIVE_Y, 0, GL20.GL_RGB, data[2].getWidth(), data[2].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[2].getPixels());
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, 0, GL20.GL_RGB, data[3].getWidth(), data[3].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[3].getPixels());
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_POSITIVE_Z, 0, GL20.GL_RGB, data[4].getWidth(), data[4].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[4].getPixels());
Gdx.gl20.glTexImage2D(GL20.GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, 0, GL20.GL_RGB, data[5].getWidth(), data[5].getHeight(), 0, GL20.GL_RGB, GL20.GL_UNSIGNED_BYTE, data[5].getPixels());
//Gdx.gl20.glGenerateMipmap(GL20.GL_TEXTURE_CUBE_MAP);
//Gdx.gl20.glTexParameteri(GL20.GL_TEXTURE_CUBE_MAP, GL20.GL_TEXTURE_MIN_FILTER, GL20.GL_LINEAR);
Gdx.gl20.glTexParameteri ( GL20.GL_TEXTURE_CUBE_MAP, GL20.GL_TEXTURE_MIN_FILTER,GL20.GL_LINEAR_MIPMAP_LINEAR );
Gdx.gl20.glTexParameteri ( GL20.GL_TEXTURE_CUBE_MAP, GL20.GL_TEXTURE_MAG_FILTER,GL20.GL_LINEAR );
Gdx.gl20.glTexParameteri ( GL20.GL_TEXTURE_CUBE_MAP, GL20.GL_TEXTURE_WRAP_S, GL20.GL_CLAMP_TO_EDGE );
Gdx.gl20.glTexParameteri ( GL20.GL_TEXTURE_CUBE_MAP, GL20.GL_TEXTURE_WRAP_T, GL20.GL_CLAMP_TO_EDGE );
Gdx.gl20.glGenerateMipmap(GL20.GL_TEXTURE_CUBE_MAP);
}
public void render(Camera camera){
//SPECIAL THANKS TO Jos van Egmond
camera.view.getRotation( q, true );
q.conjugate();
///////////////////////////////////
worldTrans.idt();
worldTrans.rotate(quaternion);
shader.begin();
shader.setUniformMatrix(u_worldTrans, worldTrans.translate(0, 0, -1));
quad.render(shader, GL20.GL_TRIANGLES);
shader.end();
}
public Mesh createQuad(){
Mesh mesh = new Mesh(true, 4, 6, VertexAttribute.Position(), VertexAttribute. ColorUnpacked(), VertexAttribute.TexCoords(0));
mesh.setVertices(new float[]
{-1f, -1f, 0, 1, 1, 1, 1, 0, 1,
1f, -1f, 0, 1, 1, 1, 1, 1, 1,
1f, 1f, 0, 1, 1, 1, 1, 1, 0,
-1f, 1f, 0, 1, 1, 1, 1, 0, 0});
mesh.setIndices(new short[] {0, 1, 2, 2, 3, 0});
return mesh;
}
#Override
public void dispose() {
shader.dispose();
quad.dispose();
for(int i=0; i<6; i++)
data[i].dispose();
}
}
How to use it? Just create instance of it:
EnvironmentCubemap envCubemap = new EnvironmentCubemap(Gdx.files.internal("cubemap/pos-x.png"), Gdx.files.internal("cubemap/neg-x.png"),
Gdx.files.internal("cubemap/pos-y.jpg"), Gdx.files.internal("cubemap/neg-y.jpg"),
Gdx.files.internal("cubemap/pos-z.png"), Gdx.files.internal("cubemap/neg-z.png"));
or
EnvironmentCubemap envCubemap = new EnvironmentCubemap(new Pixmap(Gdx.files.internal("cubemap/all_in_one.jpg")));
and then use its render method:
envCubemap.render(camera);
I hope it helps someone else!
The default shader (the glsl files) currently don't support a cubemap. You'll have to provide your own glsl files to use a cubemap. The DefaultShader (the CPU part of the shader that is used by default) will bind the cubemap to the uniform called: u_environmentCubemap. Also, the macro environmentCubemapFlag will be defined by the DefaultShader if the material contains an environment cubemap attribute. Use the following snippet in your shader to use the cubemap:
#ifdef environmentCubemapFlag
uniform samplerCube u_environmentCubemap;
#endif
Here's a relevant example snippet to use cubemap (and normal map): https://github.com/libgdx/libgdx/blob/master/tests/gdx-tests-android/assets/data/g3d/shaders/reflect.glsl
Here's a more advanced example snippet: https://github.com/libgdx/libgdx/blob/master/tests/gdx-tests-android/assets/data/g3d/shaders/test.glsl
You can specify your custom shader like this:
modelBatch = new ModelBatch(Gdx.files.internal("data/vertex.glsl"), Gdx.files.internal("data/fragment.glsl"));
More info about using a custom shader: http://blog.xoppa.com/creating-a-shader-with-libgdx/
I played around the cubemap and created a class which doesn't use native textureCube. Instead of it I created 6 planes and located them around the camera. So, my camera is fixed inside these "walls". This implementation is a bit faster and easier than using cubemap described above.
public class SkyBox implements Disposable{
Matrix4 tranformation;
ShaderProgram program;
int u_projTrans;
int u_worldTrans;
int u_tex;
Texture[] textures;
Mesh quad;
boolean invert = false;
protected String vertexShader =
" attribute vec4 a_position; "+
" attribute vec2 a_texCoord0; "+
" varying vec2 v_texCoord; "+
" uniform mat4 u_worldTrans; "+
" uniform mat4 u_projTrans; "+
" void main() "+
" { "+
" gl_Position = u_projTrans * u_worldTrans * vec4(a_position); "+
" v_texCoord = a_texCoord0; "+
" } ";
protected String fragmentShader =
" #ifdef GL_ES \n"+
" precision mediump float; \n"+
" #endif \n"+
" uniform sampler2D s_diffuse; "+
" varying vec2 v_texCoord; "+
" void main() "+
" { "+
" gl_FragColor = texture2D( s_diffuse, v_texCoord ); "+
" } ";
public String getDefaultVertexShader(){
return vertexShader;
}
public String getDefaultFragmentShader(){
return fragmentShader;
}
public SkyBox (Pixmap positiveX, Pixmap negativeX, Pixmap positiveY, Pixmap negativeY, Pixmap positiveZ, Pixmap negativeZ) {
textures = new Texture[6];
textures[3] = new Texture(positiveX);
textures[2] = new Texture(negativeX);
textures[4] = new Texture(positiveY);
textures[5] = new Texture(negativeY);
textures[0] = new Texture(positiveZ);
textures[1] = new Texture(negativeZ);
positiveX.dispose();
positiveX=null;
negativeX.dispose();
negativeX=null;
positiveY.dispose();
positiveY=null;
negativeY.dispose();
negativeY=null;
positiveZ.dispose();
positiveZ=null;
negativeZ.dispose();
negativeZ=null;
init();
}
public SkyBox (FileHandle positiveX, FileHandle negativeX, FileHandle positiveY, FileHandle negativeY, FileHandle positiveZ, FileHandle negativeZ) {
this(new Pixmap(positiveX), new Pixmap(negativeX), new Pixmap(positiveY), new Pixmap(negativeY), new Pixmap(positiveZ), new Pixmap(negativeZ));
}
public SkyBox (Pixmap cubemap) {
int w = cubemap.getWidth();
int h = cubemap.getHeight();
Pixmap[] data = new Pixmap[6];
for(int i=0; i<6; i++) data[i] = new Pixmap(w/4, h/3, Format.RGB888);
for(int x=0; x<w; x++)
for(int y=0; y<h; y++){
//-X
if(x>=0 && x<=w/4 && y>=h/3 && y<=h*2/3) data[1].drawPixel(x, y-h/3, cubemap.getPixel(x, y));
//+Y
if(x>=w/4 && x<=w/2+1 && y>=0 && y<=h/3) data[2].drawPixel(x-w/4, y, cubemap.getPixel(x, y));
//+Z
if(x>=w/4 && x<=w/2 && y>=h/3 && y<=h*2/3) data[4].drawPixel(x-w/4, y-h/3, cubemap.getPixel(x, y));
//-Y
if(x>=w/4 && x<=w/2 && y>=h*2/3 && y<=h) data[3].drawPixel(x-w/4, y-h*2/3, cubemap.getPixel(x, y));
//+X
if(x>=w/2 && x<=w*3/4 && y>=h/3 && y<=h*2/3) data[0].drawPixel(x-w/2, y-h/3, cubemap.getPixel(x, y));
//-Z
if(x>=w*3/4 && x<=w && y>=h/3 && y<=h*2/3) data[5].drawPixel(x-w*3/4, y-h/3, cubemap.getPixel(x, y));
}
textures = new Texture[6];
textures[0] = new Texture(data[4]);
textures[1] = new Texture(data[5]);
textures[2] = new Texture(data[1]);
textures[3] = new Texture(data[0]);
textures[4] = new Texture(data[2]);
textures[5] = new Texture(data[3]);
for(int i=0; i<6; i++) {
data[i].dispose();
data[i] = null;
}
cubemap.dispose();
cubemap=null;
init();
}
public SkyBox (FileHandle cubemap){
this(new Pixmap(cubemap));
}
public Mesh createTexturedQuad(){
Mesh quad = new Mesh(true, 4, 6, VertexAttribute.Position(), new VertexAttribute(Usage.TextureCoordinates, 2, "a_texCoord0"));
quad.setVertices(new float[]
{-1f, -1f, 0, 0, 1,
1f, -1f, 0, 1, 1,
1f, 1f, 0, 1, 0,
-1f, 1f, 0, 0, 0});
quad.setIndices(new short[] {0, 1, 2, 2, 3, 0});
return quad;
}
public void setInvert(boolean enable){
invert = enable;
}
public void init() {
program = new ShaderProgram(vertexShader, fragmentShader);
if (!program.isCompiled())
throw new GdxRuntimeException(program.getLog());
else Gdx.app.log("shader", "shader compiled successfully!");
u_projTrans = program.getUniformLocation("u_projTrans");
u_worldTrans = program.getUniformLocation("u_worldTrans");
u_tex = program.getUniformLocation("s_diffuse");
tranformation = new Matrix4();
quad = createTexturedQuad();
}
public void render(Camera camera){
Gdx.graphics.getGL20().glCullFace(GL20.GL_BACK);
program.begin();
program.setUniformMatrix(u_projTrans, camera.combined);
//front
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[0].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
//left
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.rotate(Vector3.Y, 90);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[ invert ? 3 : 2].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
//right
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.rotate(Vector3.Y, -90);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[invert ? 2 : 3].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
//bottom
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.rotate(Vector3.X, -90);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[5].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
//top
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.rotate(Vector3.X, 90);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[4].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
//back
tranformation.idt();
tranformation.translate(camera.position.x, camera.position.y, camera.position.z);
tranformation.rotate(Vector3.Y, 180);
tranformation.translate(0, 0, -1);
if(invert) tranformation.rotate(Vector3.Y, 180);
program.setUniformMatrix(u_worldTrans, tranformation);
textures[1].bind(0);
program.setUniformi("s_diffuse", 0);
quad.render(program, GL20.GL_TRIANGLES);
program.end();
}
#Override
public void dispose() {
program.dispose();
quad.dispose();
for(int i=0; i<6; i++){
textures[i].dispose();
textures[i]=null;
}
}
}
Using of this class is the same as previous one. Happy coding!
In addition to Nolesh' solution, the skybox can be rotated correctly in respect to the camera rotation like so:
Quaternion q = new Quaternion();
camera.view.getRotation( q, true );
q.conjugate();
envCubemap.render( q );
Cannot seem to get Vertex Attribute Arrays working properly for per vertex data.
Here's the SSCCE:
/**
 * Creates the LWJGL window and sets up a simple aspect-corrected orthographic
 * projection.
 *
 * FIX: the display mode must be chosen BEFORE Display.create(); the original
 * order created the window with a default mode and then resized it afterwards.
 */
private static void createDisplay(int w, int h) {
    try {
        Display.setDisplayMode(new DisplayMode(w, h));
        Display.create();
    }
    catch (LWJGLException e) {
        e.printStackTrace();
    }
    float size = 1;
    float aspect = (float) Display.getWidth() / Display.getHeight();
    GL11.glMatrixMode(GL11.GL_PROJECTION);
    GL11.glLoadIdentity();
    // Ortho volume spans [-size*aspect, size*aspect] x [-size, size].
    GL11.glOrtho(-size * aspect, size * aspect, -size, size, -1, 1);
    GL11.glMatrixMode(GL11.GL_MODELVIEW);
    GL11.glLoadIdentity();
}
/**
 * SSCCE: draws a unit quad via a VAO, a vertex attribute array, and an
 * element buffer.
 */
public static void main(String[] args) {
    createDisplay(1200, 800);
    GL11.glViewport(0, 0, Display.getWidth(), Display.getHeight());

    ShaderManager.createShader("2Dv", new File("src/Shaders/2D.vert"), SHADER_VERT);
    ShaderManager.createShader("2Df", new File("src/Shaders/2D.frag"), SHADER_FRAG);
    ShaderManager.createProgram("2D", "2Dv", "2Df");
    // Shader compiles and links correctly.
    ShaderManager.useProgram("2D");
    // Calls glUseProgram(programID);

    // Quad corners (x, y), counter-clockwise.
    float[] vertexData = new float[] {-0.5f, -0.5f, 0.5f, -0.5f, 0.5f, 0.5f, -0.5f, 0.5f};

    int vao = GL30.glGenVertexArrays();
    if (vao == 0)
        System.exit(-1);
    GL30.glBindVertexArray(vao);

    int vertexBuffer = GL15.glGenBuffers();
    GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vertexBuffer);
    GL15.glBufferData(GL15.GL_ARRAY_BUFFER, BufferUtil.asDirectFloatBuffer(vertexData), GL15.GL_DYNAMIC_DRAW);

    int loc = ShaderManager.currentProgram.getAttribute("vertex");
    if (loc == -1)
        Debug.log(Debug.INSTANCE_MANAGEMENT, "Attribute [", "", "] not found in Shader [",
                ShaderManager.currentProgram.toString(), "].");
    else {
        // NOTE(review): some AMD drivers render nothing when generic attribute
        // array 0 is unused; pin the attribute in the vertex shader with
        // "layout(location = 0) in vec4 vertex;" to avoid the driver bug.
        GL20.glVertexAttribPointer(loc, 2, GLCONST.TYPE_FLOAT, false, 2 * 4, 0);
        GL20.glEnableVertexAttribArray(loc);
    }
    GL30.glBindVertexArray(0);

    GL11.glColor3f(1, 0, 0);
    GL11.glClearColor(0.5f, 0.5f, 0.8f, 1);

    // FIX: the element buffer was filled with *float* data but drawn with
    // GL_UNSIGNED_INT -- the raw bit patterns of 1.0f..3.0f are garbage
    // indices. Upload genuine ints instead.
    java.nio.IntBuffer indexData = java.nio.ByteBuffer
            .allocateDirect(4 * Integer.BYTES)
            .order(java.nio.ByteOrder.nativeOrder())
            .asIntBuffer();
    indexData.put(new int[] {0, 1, 2, 3});
    indexData.flip();
    int indexBuffer = GL15.glGenBuffers();
    GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, indexBuffer);
    GL15.glBufferData(GL15.GL_ELEMENT_ARRAY_BUFFER, indexData, GL15.GL_DYNAMIC_DRAW);

    while (!Display.isCloseRequested()) {
        GL11.glClear(GL11.GL_COLOR_BUFFER_BIT);
        GL30.glBindVertexArray(vao);
        GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, indexBuffer);
        // NOTE(review): GL_QUADS is unavailable in a 3.2+ core profile;
        // switch to two GL_TRIANGLES (6 indices) when targeting core.
        GL11.glDrawElements(GL11.GL_QUADS, 4, GL11.GL_UNSIGNED_INT, 0);
        GL30.glBindVertexArray(0);
        Display.update();

        int error = GL11.glGetError();
        if (error != GL11.GL_NO_ERROR)
            System.out.println(GLU.gluErrorString(error));
    }
}
The problem lies in the usage of vertex Attribute arrays. The old code I used was:
GL11.glEnableClientState(GL11.GL_VERTEX_ARRAY);
GL11.glVertexPointer(2, GL11.GL_FLOAT, 2 * 4, 0);
The new version is:
int loc = ShaderManager.currentProgram.getAttribute("vertex");//Call to glGetAttribLocation();
if (loc == -1){
System.exit(-1);
}
GL20.glVertexAttribPointer(loc, 2, GL11.GL_FLOAT, false, 2 * 4, 0);
GL20.glEnableVertexAttribArray(loc);
The original code was the commented 2 lines. Upon running, this correctly sent vertex data to gl_Vertex and rendered a square of size 1.
The new code should send vertex data to the vertex attribute, but it receives nothing.
When the original code is uncommented and both old and new code used, both gl_Vertex and vertex attribute get vertex data.
What is going wrong here?
So I figured out the problem after a while. The problem is due to an AMD driver bug when using a OpenGL 3.0+ core profile.
The "vertex" attribute array was assigned a location of 1.
The bug occurs when the attribute array 0 is unused. Nothing is rendered if array 0 is not enabled.
To fix this problem I simply assigned "vertex" explicitly to location 0:
layout(location = 0) in vec4 vertex;
I am trying to create a texture atlas with OpenGL ES 2.0. I want to pack many little images into one big image. It works well in the emulator, but it doesn't work on the device.
Here is my code:
// Create the atlas texture and upload the 256x256 base image (bmp).
gl.glGenTextures(1, textureID, 0);
// FIX: bind the generated id, not the int[] array that holds it.
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureID[0]);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bmp, 0);

// Decode the 16x16 sub-image and copy its pixels into a buffer GL can read.
Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), R.drawable.block);
// FIX: use a direct, native-ordered buffer - many device drivers reject
// heap-allocated (non-direct) buffers, which is why this worked only on
// the emulator.
ByteBuffer pixels = ByteBuffer.allocateDirect(bitmap.getWidth() * bitmap.getHeight() * 4)
        .order(java.nio.ByteOrder.nativeOrder());
bitmap.copyPixelsToBuffer(pixels);
// FIX: copyPixelsToBuffer leaves the position at the end; rewind so
// glTexSubImage2D reads the pixel data instead of nothing.
pixels.position(0);
gl.glTexSubImage2D(GL10.GL_TEXTURE_2D, 0, 0, 0, bitmap.getWidth(), bitmap.getHeight(), GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, pixels);
The first Bitmap (bmp) is a 256x256 red image; the second Bitmap (bitmap) is a 16x16 white image. In the emulator I see a red rectangle with a little white image in it, but on the device I can only see the big red rectangle.
You are using power-of-two textures, which is great, as most implementations of OpenGL ES 2.0 do not support npot textures. Now, because you were able to see the textures on the emulator, it seems that you are not transforming the object (to be rendered) using the projection matrix (MVP matrix). This example shows how to do that -
/**
 * OpenGL ES 2.0 renderer that draws a single textured 20x20 plane using an
 * MVP (model-view-projection) transform. Geometry is built once in
 * onSurfaceChanged(); the texture is loaded from R.drawable.brick.
 */
public class GLES20Renderer implements Renderer {

    private int _planeProgram;
    private int _planeAPositionLocation;
    private int _planeACoordinateLocation;
    private int _planeUMVPLocation;
    private int _planeUSamplerLocation;

    // Client-side geometry: vertices, texture coordinates, triangle indices.
    private FloatBuffer _planeVFB;
    private FloatBuffer _planeTFB;
    private ShortBuffer _planeISB;

    private float[] _ViewMatrix = new float[16];
    private float[] _ProjectionMatrix = new float[16];
    private float[] _MVPMatrix = new float[16];

    private int _textureId;

    // FIX: _zAngle was referenced by setZAngle()/getZAngle() but never declared.
    // volatile because it is written from the UI thread and read on the GL thread.
    private static volatile float _zAngle;

    public Context _context;

    public GLES20Renderer(Context context) {
        _context = context;
    }

    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1);
    }

    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
        initplane();

        // Build a perspective frustum from a vertical field of view; the eye
        // sits at (0, 0, 50) looking at the origin with +Y up.
        float ratio = (float) width / height;
        float zNear = 0.1f;
        float zFar = 1000;
        float fov = 0.95f; // 0.2 to 1.0
        float size = (float) (zNear * Math.tan(fov / 2));
        Matrix.setLookAtM(_ViewMatrix, 0, 0, 0, 50, 0, 0, 0, 0, 1, 0);
        Matrix.frustumM(_ProjectionMatrix, 0, -size, size, -size / ratio, size / ratio, zNear, zFar);

        _planeProgram = loadProgram(_planeVertexShaderCode, _planeFragmentShaderCode);
        _planeAPositionLocation = GLES20.glGetAttribLocation(_planeProgram, "aPosition");
        _planeACoordinateLocation = GLES20.glGetAttribLocation(_planeProgram, "aCoord");
        _planeUMVPLocation = GLES20.glGetUniformLocation(_planeProgram, "uMVP");
        _planeUSamplerLocation = GLES20.glGetUniformLocation(_planeProgram, "uSampler");

        // Decode the brick bitmap and upload it as a 2D texture.
        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        _textureId = textures[0];
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _textureId);
        InputStream is1 = _context.getResources().openRawResource(R.drawable.brick);
        Bitmap img1;
        try {
            img1 = BitmapFactory.decodeStream(is1);
        } finally {
            try {
                is1.close();
            } catch (IOException ignored) {
                // Closing a fully-read resource stream; nothing useful to do.
            }
        }
        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); // GL_LINEAR
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img1, 0);
    }

    public void onDrawFrame(GL10 gl) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        Matrix.multiplyMM(_MVPMatrix, 0, _ProjectionMatrix, 0, _ViewMatrix, 0);

        GLES20.glUseProgram(_planeProgram);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _textureId);
        GLES20.glUniform1i(_planeUSamplerLocation, 0);
        GLES20.glUniformMatrix4fv(_planeUMVPLocation, 1, false, _MVPMatrix, 0);

        GLES20.glVertexAttribPointer(_planeAPositionLocation, 3, GLES20.GL_FLOAT, false, 12, _planeVFB);
        GLES20.glEnableVertexAttribArray(_planeAPositionLocation);
        GLES20.glVertexAttribPointer(_planeACoordinateLocation, 2, GLES20.GL_FLOAT, false, 8, _planeTFB);
        GLES20.glEnableVertexAttribArray(_planeACoordinateLocation);
        GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_SHORT, _planeISB);
        // FIX: removed System.gc() - forcing a garbage collection every frame
        // causes stutter, and this method allocates nothing per frame anyway.
    }

    public static void setZAngle(float angle) {
        GLES20Renderer._zAngle = angle;
    }

    public static float getZAngle() {
        return GLES20Renderer._zAngle;
    }

    /** Builds the direct NIO buffers for the plane's vertices, UVs and indices. */
    private void initplane() {
        float[] planeVFA = {
                10.000000f,-10.000000f,0.000000f,
                -10.000000f,-10.000000f,0.000000f,
                10.000000f,10.000000f,0.000000f,
                -10.000000f,10.000000f,0.000000f,
        };
        float[] planeTFA = {
                // 1,0, 0,0, 1,1, 0,1
                1,1, 0,1, 1,0, 0,0
        };
        short[] planeISA = {
                2,3,1,
                0,2,1,
        };
        // GL requires direct, native-ordered buffers for client-side arrays.
        ByteBuffer planeVBB = ByteBuffer.allocateDirect(planeVFA.length * 4);
        planeVBB.order(ByteOrder.nativeOrder());
        _planeVFB = planeVBB.asFloatBuffer();
        _planeVFB.put(planeVFA);
        _planeVFB.position(0);

        ByteBuffer planeTBB = ByteBuffer.allocateDirect(planeTFA.length * 4);
        planeTBB.order(ByteOrder.nativeOrder());
        _planeTFB = planeTBB.asFloatBuffer();
        _planeTFB.put(planeTFA);
        _planeTFB.position(0);

        ByteBuffer planeIBB = ByteBuffer.allocateDirect(planeISA.length * 2);
        planeIBB.order(ByteOrder.nativeOrder());
        _planeISB = planeIBB.asShortBuffer();
        _planeISB.put(planeISA);
        _planeISB.position(0);
    }

    /** Compiles a shader of the given type, throwing on compile errors. */
    private int loadShader(int type, String source) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        // FIX: fail loudly instead of silently returning a broken shader.
        int[] status = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, status, 0);
        if (status[0] == 0) {
            throw new RuntimeException("Shader compile failed: " + GLES20.glGetShaderInfoLog(shader));
        }
        return shader;
    }

    /** Links a program from the two shader sources, throwing on link errors. */
    private int loadProgram(String vertexShaderCode, String fragmentShaderCode) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vertexShader);
        GLES20.glAttachShader(program, fragmentShader);
        GLES20.glLinkProgram(program);
        // FIX: verify the link actually succeeded.
        int[] status = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
        if (status[0] == 0) {
            throw new RuntimeException("Program link failed: " + GLES20.glGetProgramInfoLog(program));
        }
        return program;
    }

    private final String _planeVertexShaderCode =
            "attribute vec4 aPosition; \n"
            + "attribute vec2 aCoord; \n"
            + "varying vec2 vCoord; \n"
            + "uniform mat4 uMVP; \n"
            + "void main() { \n"
            + " gl_Position = uMVP * aPosition; \n"
            + " vCoord = aCoord; \n"
            + "} \n";

    private final String _planeFragmentShaderCode =
            "#ifdef GL_FRAGMENT_PRECISION_HIGH \n"
            + "precision highp float; \n"
            + "#else \n"
            + "precision mediump float; \n"
            + "#endif \n"
            + "varying vec2 vCoord; \n"
            + "uniform sampler2D uSampler; \n"
            + "void main() { \n"
            + " gl_FragColor = texture2D(uSampler,vCoord); \n"
            + "} \n";
}
More of these at - http://www.apress.com/9781430250531
If your code is really OpenGL ES 2.0, you should not be using the GL10 wrapper class. I think 1.0 did not support non-power of 2 texture sizes, without using extensions, but I guess you have handled that? You may have found a bug in the OpenGL ES drivers for your device, since it works on the AVD emulator. Try a different device, with a different type of GPU. This article will help:
http://software.intel.com/en-us/articles/porting-opengl-games-to-android-on-intel-atom-processors-part-1
I'm trying to render an image on a basic quad. I took a look at the Space Invaders example game for the code and implemented that code into mine. The image gets rendered on screen with the right colors, but it appears shifted. This is the image I'm trying to render:
http://img203.imageshack.us/img203/5264/testwq.png
This is how it renders:
http://img593.imageshack.us/img593/6849/test2uh.png
The image is 128x128, and so is the quad.
Here is my code:
/**
 * Loads PNG images from the resources directory, pads them to power-of-two
 * dimensions, and uploads them as OpenGL textures via LWJGL.
 */
public class RenderEngine
{
    // Scratch buffer for glGenTextures (one texture id at a time).
    private IntBuffer intbuf = BufferUtils.createIntBuffer(1);

    // Pixel layouts OpenGL expects: 4-byte interleaved RGBA for translucent
    // images, 3-byte interleaved RGB for opaque ones.
    private ColorModel glAlphaColorModel;
    private ColorModel glColorModel;

    public RenderEngine()
    {
        this.glAlphaColorModel = new ComponentColorModel(ColorSpace.getInstance(ColorSpace.CS_sRGB), new int[] { 8, 8, 8, 8 }, true, false, Transparency.TRANSLUCENT, DataBuffer.TYPE_BYTE);
        this.glColorModel = new ComponentColorModel(ColorSpace.getInstance(ColorSpace.CS_sRGB), new int[] { 8, 8, 8, 0 }, false, false, Transparency.OPAQUE, DataBuffer.TYPE_BYTE);
    }

    /**
     * Loads "resources/<filename>" and binds it as the current GL_TEXTURE_2D.
     * NOTE(review): a new texture is created on every call; consider caching
     * filename -> texture id to avoid re-decoding and leaking texture objects.
     */
    public void bindTexture(String filename)
    {
        try
        {
            File file = new File(CivilPolitica.instance.getDir(), "resources/" + filename);
            FileInputStream fis = new FileInputStream(file);
            GL11.glBindTexture(GL11.GL_TEXTURE_2D, this.getTexture(fis));
            fis.close();
        }
        catch (Exception e)
        {
            e.printStackTrace();
            System.exit(0);
        }
    }

    /**
     * Decodes the stream into a power-of-two-padded image and uploads it,
     * returning the new texture id (0 on failure, after exiting).
     */
    private int getTexture(InputStream in)
    {
        try
        {
            GL11.glGenTextures(this.intbuf);
            int id = this.intbuf.get(0);
            GL11.glBindTexture(GL11.GL_TEXTURE_2D, id);

            BufferedImage bi = ImageIO.read(in);
            int format = bi.getColorModel().hasAlpha() ? GL11.GL_RGBA : GL11.GL_RGB;

            ByteBuffer texData;
            WritableRaster raster;
            BufferedImage texImage;

            // Round each dimension up to the next power of two (minimum 2):
            // many GL implementations do not accept NPOT textures.
            int texWidth = 2;
            int texHeight = 2;
            while (texWidth < bi.getWidth())
            {
                texWidth *= 2;
            }
            while (texHeight < bi.getHeight())
            {
                texHeight *= 2;
            }

            if (bi.getColorModel().hasAlpha())
            {
                raster = Raster.createInterleavedRaster(DataBuffer.TYPE_BYTE, texWidth, texHeight, 4, null);
                texImage = new BufferedImage(this.glAlphaColorModel, raster, false, new Hashtable<String, Object>());
            }
            else
            {
                raster = Raster.createInterleavedRaster(DataBuffer.TYPE_BYTE, texWidth, texHeight, 3, null);
                texImage = new BufferedImage(this.glColorModel, raster, false, new Hashtable<String, Object>());
            }

            // Clear the padded canvas, then draw the source into its top-left.
            Graphics g = texImage.getGraphics();
            g.setColor(new Color(0f, 0f, 0f, 0f));
            g.fillRect(0, 0, texWidth, texHeight);
            g.drawImage(bi, 0, 0, null);

            byte[] data = ((DataBufferByte) texImage.getRaster().getDataBuffer()).getData();
            texData = ByteBuffer.allocateDirect(data.length);
            texData.order(ByteOrder.nativeOrder());
            texData.put(data, 0, data.length);
            texData.flip();

            glTexParameteri(GL11.GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL11.GL_LINEAR);
            glTexParameteri(GL11.GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL11.GL_LINEAR);
            // Rows of 3-byte RGB data are not 4-byte aligned for every width.
            GL11.glPixelStorei(GL11.GL_UNPACK_ALIGNMENT, 1);
            // FIX: the buffer holds texWidth x texHeight pixels (the padded
            // canvas), so upload with those dimensions. Using the source
            // image's width/height misreads rows whenever the source is not
            // already a power of two, shifting the texture.
            glTexImage2D(GL11.GL_TEXTURE_2D, 0, GL11.GL_RGBA, texWidth, texHeight, 0, format, GL_UNSIGNED_BYTE, texData);
            return id;
        }
        catch (Exception e)
        {
            e.printStackTrace();
            System.exit(0);
            return 0;
        }
    }
}
And the actual quad:
CivilPolitica.instance.renderer.bindTexture("test.png");

GL11.glPushMatrix();
GL11.glTranslatef(128, 128, 0);

GL11.glBegin(GL11.GL_QUADS);
// FIX: texture coordinates are normalized - 0.0f..1.0f spans the whole
// texture regardless of its pixel size. Passing pixel values (127) made
// the texture repeat/shift across the quad.
GL11.glTexCoord2f(0.0f, 0.0f);
GL11.glVertex2i(0, 0);
GL11.glTexCoord2f(0.0f, 1.0f);
GL11.glVertex2i(0, 128);
GL11.glTexCoord2f(1.0f, 1.0f);
GL11.glVertex2i(128, 128);
GL11.glTexCoord2f(1.0f, 0.0f);
GL11.glVertex2i(128, 0);
GL11.glEnd();

GL11.glPopMatrix();
GL11.glTexCoord2f(0, 0);
GL11.glVertex2i(0, 0);
GL11.glTexCoord2f(0, 127);
GL11.glVertex2i(0, 128);
GL11.glTexCoord2f(127, 127);
GL11.glVertex2i(128, 128);
GL11.glTexCoord2f(127, 0);
GL11.glVertex2i(128, 0);
must be
GL11.glTexCoord2f(0.0f, 0.0f);
GL11.glVertex2i(0, 0);
GL11.glTexCoord2f(0.0f, 1.0f);
GL11.glVertex2i(0, 128);
GL11.glTexCoord2f(1.0f, 1.0f);
GL11.glVertex2i(128, 128);
GL11.glTexCoord2f(1.0f, 0.0f);
GL11.glVertex2i(128, 0);
because these are texture coordinates ranging from 0.0f to 1.0f (0.0f is one edge of the texture and 1.0f is the other; this way the mapping is independent of the texture's resolution)