Spotlight not working - java

I'm trying to get a spotlight working, but it's not showing up. Could someone take a look and see where I've gone wrong?
package water3;
import Common.TextureReader;
import com.sun.opengl.util.BufferUtil;
import java.io.IOException;
import javax.media.opengl.glu.GLUquadric;
import javax.media.opengl.GL;
import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.GLEventListener;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import javax.media.opengl.glu.GLU;
class Renderer implements GLEventListener {
private GLUquadric quadric;
private Object3D object3D;
private float[] LightPos = {0.0f, 5.0f, -4.0f, 1.0f}; // Light Position
private boolean LightUp, LightDown, LightLeft, LightRight, LightForward, LightBackward;
private int[] textures = new int[3]; // Storage For 3 Textures
double aNum = 1;
private boolean aDown =false;
private boolean up =false;
private GLU glu = new GLU();
public void init(GLAutoDrawable drawable) {
GL gl = drawable.getGL();
try {
loadGLTextures(drawable);
} catch (IOException e) {
System.out.println("Couldn't load model/Texture");
throw new RuntimeException(e);
}
/*
gl.glShadeModel(GL.GL_SMOOTH); // Enable Smooth Shading
gl.glClearColor(0.0f, 0.0f, 0.0f, 0.5f); // Black Background
gl.glClearDepth(1.0f); // Depth Buffer Setup
gl.glClearStencil(0); // Stencil Buffer Setup
gl.glEnable(GL.GL_DEPTH_TEST); // Enables Depth Testing
gl.glDepthFunc(GL.GL_LEQUAL); // The Type Of Depth Testing To Do
gl.glHint(GL.GL_PERSPECTIVE_CORRECTION_HINT, GL.GL_NICEST); // Really Nice Perspective Calculations
/* gl.glLightfv(GL.GL_LIGHT1, GL.GL_POSITION, LightPos, 0); // Set Light1 Position
gl.glLightfv(GL.GL_LIGHT1, GL.GL_AMBIENT, LightAmb, 0); // Set Light1 Ambience
gl.glLightfv(GL.GL_LIGHT1, GL.GL_DIFFUSE, LightDif, 0); // Set Light1 Diffuse
gl.glLightfv(GL.GL_LIGHT1, GL.GL_SPECULAR, LightSpc, 0); // Set Light1 Specular
gl.glEnable(GL.GL_LIGHT1);
*/// Enable Light1
// Enable Lighting
/*
gl.glMaterialfv(GL.GL_FRONT, GL.GL_AMBIENT, MatAmb, 0); // Set Material Ambience
gl.glMaterialfv(GL.GL_FRONT, GL.GL_DIFFUSE, MatDif, 0); // Set Material Diffuse
gl.glMaterialfv(GL.GL_FRONT, GL.GL_SPECULAR, MatSpc, 0); // Set Material Specular
gl.glMaterialfv(GL.GL_FRONT, GL.GL_SHININESS, MatShn, 0); // Set Material Shininess
*/
gl.glClearColor(0,0,0,1);
gl.glEnable(GL.GL_LIGHT0);
gl.glEnable(GL.GL_DEPTH_TEST);
gl.glShadeModel(GL.GL_SMOOTH);
gl.glLightModeli(GL.GL_LIGHT_MODEL_TWO_SIDE, GL.GL_TRUE);
gl.glCullFace(GL.GL_BACK); // Set Culling Face To Back Face
gl.glEnable(GL.GL_CULL_FACE); // Enable Culling
gl.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // Set Clear Color (Black)
quadric = glu.gluNewQuadric(); // Initialize Quadric
glu.gluQuadricNormals(quadric, GL.GL_SMOOTH); // Enable Smooth Normal Generation
glu.gluQuadricTexture(quadric, false);
}
public void display(GLAutoDrawable drawable) {
GL gl = drawable.getGL();
// Clear Color Buffer, Depth Buffer, Stencil Buffer
gl.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT | GL.GL_STENCIL_BUFFER_BIT);
gl.glLoadIdentity();
SetSpotlight(gl);
SetFloorMaterial(gl);
gl.glPushMatrix();
// Reset Modelview Matrix
gl.glTranslatef(0.0f, 0.0f, -20.0f); // Zoom Into The Screen 20 Units
gl.glEnable(GL.GL_TEXTURE_2D); // Enable Texture Mapping ( NEW )
drawRoom(gl); // Draw The Room
gl.glPopMatrix();
gl.glFlush(); // Flush The OpenGL Pipeline
}
private void drawRoom(GL gl) { // Draw The Room (Box)
gl.glBegin(GL.GL_QUADS); // Begin Drawing Quads
// Floor
gl.glNormal3f(0.0f, 1.0f, 0.0f); // Normal Pointing Up
gl.glVertex3f(-20.0f, -20.0f, -40.0f); // Back Left
gl.glVertex3f(-20.0f, -20.0f, 40.0f); // Front Left
gl.glVertex3f(20.0f, -20.0f, 40.0f); // Front Right
gl.glVertex3f(20.0f, -20.0f, -40.0f); // Back Right
// Ceiling
gl.glNormal3f(0.0f, -1.0f, 0.0f); // Normal Pointing Down
gl.glVertex3f(-10.0f, 10.0f, 20.0f); // Front Left
gl.glVertex3f(-10.0f, 10.0f, -20.0f); // Back Left
gl.glVertex3f(10.0f, 10.0f, -20.0f); // Back Right
gl.glVertex3f(10.0f, 10.0f, 20.0f); // Front Right
// Back Wall
gl.glNormal3f(0.0f, 0.0f, -1.0f); // Normal Pointing Towards Viewer
gl.glVertex3f(20.0f, 20.0f, 30.0f); // Top Right
gl.glVertex3f(20.0f, -20.0f, 30.0f); // Bottom Right
gl.glVertex3f(-20.0f, -20.0f, 30.0f); // Bottom Left
gl.glVertex3f(-20.0f, 20.0f, 30.0f); // Top Left
// Left Wall
gl.glNormal3f(1.0f, 0.0f, 0.0f); // Normal Pointing Right
gl.glVertex3f(-20.0f, 20.0f, 30.0f); // Top Front
gl.glVertex3f(-20.0f, -20.0f, 30.0f); // Bottom Front
gl.glVertex3f(-20.0f, -20.0f, -30.0f); // Bottom Back
gl.glVertex3f(-20.0f, 20.0f, -30.0f); // Top Back
// Right Wall
gl.glNormal3f(-1.0f, 0.0f, 0.0f); // Normal Pointing Left
gl.glVertex3f(20.0f, 20.0f, -30.0f); // Top Back
gl.glVertex3f(20.0f, -20.0f, -30.0f); // Bottom Back
gl.glVertex3f(20.0f, -20.0f, 30.0f); // Bottom Front
gl.glVertex3f(20.0f, 20.0f, 30.0f); // Top Front
// Front Wall
gl.glNormal3f(0.0f, 0.0f, 1.0f); // Normal Pointing Away From Viewer
gl.glTexCoord2f(1,1);
gl.glVertex3f(-20.0f, 20.0f, -30.0f); // Top Left
gl.glTexCoord2f(1,0);
gl.glVertex3f(-20.0f, -20.0f, -30.0f); // Bottom Left
gl.glTexCoord2f(0,0);
gl.glVertex3f(20.0f, -20.0f, -30.0f); // Bottom Right
gl.glTexCoord2f(0,1);
gl.glVertex3f(20.0f, 20.0f, -30.0f); // Top Right
gl.glEnd(); // Done Drawing Quads
}
public void reshape(GLAutoDrawable drawable,int xstart,int ystart,int width,int height) {
GL gl = drawable.getGL();
height = (height == 0) ? 1 : height;
gl.glViewport(0, 0, width, height);
gl.glMatrixMode(GL.GL_PROJECTION);
gl.glLoadIdentity();
gl.glRotatef(90, 0.0f, 0.0f, 1.0f);
glu.gluPerspective(60, (float) width / height, 1, 1000);
glu.gluLookAt(1.0f,0.0f,25.0f,
0.0f,0.0f,0.0f,
0.0f,0.0f,1.0f);
gl.glMatrixMode(GL.GL_MODELVIEW);
gl.glLoadIdentity();
}
public void displayChanged(GLAutoDrawable drawable, boolean modeChanged,boolean deviceChanged) { }
private void loadGLTextures(GLAutoDrawable gldrawable) throws IOException {
TextureReader.Texture texture = null;
texture = TextureReader.readTexture("data/images/04.bmp");
GL gl = gldrawable.getGL();
//Create Nearest Filtered Texture
gl.glGenTextures(1, textures, 0);
gl.glBindTexture(GL.GL_TEXTURE_2D, textures[0]);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR);
gl.glTexImage2D(GL.GL_TEXTURE_2D,
0,
3,
texture.getWidth(),
texture.getHeight(),
0,
GL.GL_RGB,
GL.GL_UNSIGNED_BYTE,
texture.getPixels());
}
public void SetSpotlight(GL gl){
gl.glDisable(GL.GL_LIGHTING);
// prepare spotlight
float spot_ambient[] = {50.2f,0.0f,0.0f,1.0f };//white ={10.2f,10.2f,10.2f,1.0f };
float spot_diffuse[] = {50.8f,0.0f,0.0f,1.0f };
float spot_specular[] = {50.8f,0.0f,0.0f,1.0f };
// set colors here and do the geometry in draw
gl.glLightfv(GL.GL_LIGHT0, GL.GL_AMBIENT, spot_ambient,0);
gl.glLightfv(GL.GL_LIGHT0, GL.GL_DIFFUSE, spot_diffuse,0);
gl.glLightfv(GL.GL_LIGHT0, GL.GL_SPECULAR, spot_specular,0);
gl.glEnable(GL.GL_LIGHTING);
gl.glEnable(GL.GL_LIGHT0);
// set light position
// since the light follows the model when moving, position the
// spotlight here so it moves with the scene
float spot_position[] = {0.0f,60.0f,-30.0f,1.0f};
float spot_direction[] = {0.0f,0.0f,-1.0f};
float spot_angle = 10.0f;
gl.glLightfv(GL.GL_LIGHT0, GL.GL_POSITION, spot_position,0);
gl.glLightfv(GL.GL_LIGHT0, GL.GL_SPOT_DIRECTION,spot_direction,0);
gl.glLightf(GL.GL_LIGHT0, GL.GL_SPOT_CUTOFF,(float)spot_angle);
// "smoothing" the border of the lightcone
// change this for effect
gl.glMaterialfv(GL.GL_FRONT, GL.GL_AMBIENT_AND_DIFFUSE, new float[] {0.7f, 0.7f, 1.0f, 1.0f}, 0); // needs all 4 RGBA components
gl.glLighti(GL.GL_LIGHT0, GL.GL_SPOT_EXPONENT, 20);
gl.glEnable(GL.GL_LIGHTING);
}
public void SetFloorMaterial(GL gl){
float amb[]={0.3f,0.3f,0.0f,1.0f};
float diff[]={1.0f,1.0f,0.5f,1.0f};
float spec[]={0.6f,0.6f,0.5f,1.0f};
float shine=0.25f;
gl.glMaterialfv(GL.GL_FRONT,GL.GL_AMBIENT,amb,0);
gl.glMaterialfv(GL.GL_FRONT,GL.GL_DIFFUSE,diff,0);
gl.glMaterialfv(GL.GL_FRONT,GL.GL_SPECULAR,spec,0);
gl.glMaterialf(GL.GL_FRONT,GL.GL_SHININESS,shine*128.0f);
}
}

OpenGL's fixed-function pipeline does illumination only at the vertices and then just interpolates. Since the walls of your room are just very large quads, every wall has only 4 vertices, so only 4 lighting calculations happen per wall. For a spotlight effect you'd need a lot more lighting calculations, so you have to refine (i.e. tessellate) the walls' meshes, or use per-pixel lighting (i.e. do the lighting calculations at the fragment level, which is done using shaders).
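For illustration, here is a minimal sketch of the tessellation route using the same JOGL GL interface as the code above (the method name and grid resolution are mine, not from the question). Replacing the single floor quad with, say, drawTessellatedFloor(gl, 40) gives the spotlight enough vertices to show up:
// Draw the floor as a grid of small quads instead of one huge quad, so the
// fixed-function pipeline evaluates the spotlight at many more vertices.
private void drawTessellatedFloor(GL gl, int divisions) {
    final float xMin = -20.0f, xMax = 20.0f; // same extents as the original floor
    final float zMin = -40.0f, zMax = 40.0f;
    final float y = -20.0f;
    final float dx = (xMax - xMin) / divisions;
    final float dz = (zMax - zMin) / divisions;
    gl.glBegin(GL.GL_QUADS);
    gl.glNormal3f(0.0f, 1.0f, 0.0f);         // normal still points up everywhere
    for (int i = 0; i < divisions; i++) {
        for (int j = 0; j < divisions; j++) {
            float x = xMin + i * dx;
            float z = zMin + j * dz;
            gl.glVertex3f(x,      y, z);
            gl.glVertex3f(x,      y, z + dz);
            gl.glVertex3f(x + dx, y, z + dz);
            gl.glVertex3f(x + dx, y, z);
        }
    }
    gl.glEnd();
}
The other walls can be subdivided the same way; the finer the grid, the closer the interpolated result gets to a true spotlight cone.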

OpenGL ES - Too much work on main thread

I'm trying to render a 3D cube with a different texture on each face (a die). It works flawlessly when I render the cube with the same image on every face, but with different textures the cube doesn't even display, and the following is thrown in the Android monitor:
Skipped 36 frames! The application may be doing too much work on its main thread.
After some research I'm aware that I could run this "heavy processing" in its own thread, but I'm lost as to how to go about it. I'm relatively new to OpenGL rendering, so I don't know whether my code is optimal either.
I'm thinking I should run Dice3D.java in another thread? This is the only class that doesn't extend or implement anything, and from a little research I gather the class would need to implement Runnable?
Any help in making this work would be great, thanks in advance!
Here are my classes:
OpenGLRenderer.java
public class OpenGLRenderer implements GLSurfaceView.Renderer {
private Context context;
private Dice3D dice3D;
public OpenGLRenderer(Context context, FrameLayout openGLLayout) {
this.context = context;
dice3D = new Dice3D(context);
GLSurfaceView glView = new GLSurfaceView(context);
glView.setRenderer(this);
//put to welcome layout
openGLLayout.addView(glView, new FrameLayout.LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT));
}
private float mCubeRotation;
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
gl.glClearColor(1.0f, 1.0f, 1.0f, 1.0f); // Set color's clear-value to white
gl.glClearDepthf(1.0f); // Set depth's clear-value to farthest
gl.glEnable(GL10.GL_DEPTH_TEST); // Enables depth-buffer for hidden surface removal
gl.glDepthFunc(GL10.GL_LEQUAL); // The type of depth testing to do
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST); // nice perspective view
gl.glShadeModel(GL10.GL_SMOOTH); // Enable smooth shading of color
gl.glDisable(GL10.GL_DITHER); // Disable dithering for better performance
// Setup Texture, each time the surface is created (NEW)
dice3D.loadTexture(gl); // Load images into textures (NEW)
gl.glEnable(GL10.GL_TEXTURE_2D); // Enable texture (NEW)
}
@Override
public void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glLoadIdentity();
gl.glTranslatef(0.0f, 0.0f, -6.0f);
gl.glRotatef(mCubeRotation, 1.0f, 1.0f, 1.0f);
dice3D.draw(gl);
mCubeRotation -= 0.15f;
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
gl.glViewport(0, 0, width, height);
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
GLU.gluPerspective(gl, 45.0f, (float)width / (float)height, 0.1f, 100.0f);
gl.glViewport(0, 0, width, height);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
}
}
Dice3D.java
public class Dice3D {
private FloatBuffer vertexBuffer; // Buffer for vertex-array
private FloatBuffer texBuffer; // Buffer for texture-coords-array (NEW)
private int numFaces = 6;
private int[] imageFileIDs = { // Image file IDs
R.drawable.one,
R.drawable.two,
R.drawable.three,
R.drawable.four,
R.drawable.five,
R.drawable.six
};
private int[] textureIDs = new int[numFaces];
private Bitmap[] bitmap = new Bitmap[numFaces];
// Constructor - Set up the buffers
public Dice3D(Context context) {
// Setup vertex-array buffer. Vertices in float. A float has 4 bytes
ByteBuffer vbb = ByteBuffer.allocateDirect(12 * 4 * numFaces);
vbb.order(ByteOrder.nativeOrder()); // Use native byte order
vertexBuffer = vbb.asFloatBuffer(); // Convert from byte to float
for (int face = 0; face < numFaces; face++) {
bitmap[face] = BitmapFactory.decodeStream(context.getResources().openRawResource(imageFileIDs[face]));
float[] vertices = { // Vertices for a face
-1.0f, -1.0f, 0.0f, // 0. left-bottom-front
1.0f, -1.0f, 0.0f, // 1. right-bottom-front
-1.0f, 1.0f, 0.0f, // 2. left-top-front
1.0f, 1.0f, 0.0f // 3. right-top-front
};
vertexBuffer.put(vertices); // Populate
}
vertexBuffer.position(0); // Rewind
float[] texCoords = { // Texture coords for the above face (NEW)
0.0f, 1.0f, // A. left-bottom (NEW)
1.0f, 1.0f, // B. right-bottom (NEW)
0.0f, 0.0f, // C. left-top (NEW)
1.0f, 0.0f // D. right-top (NEW)
};
// Setup texture-coords-array buffer, in float. A float has 4 bytes (NEW)
ByteBuffer tbb = ByteBuffer.allocateDirect(texCoords.length * 4 * numFaces);
tbb.order(ByteOrder.nativeOrder());
texBuffer = tbb.asFloatBuffer();
for (int face = 0; face < numFaces; face++) {
texBuffer.put(texCoords);
}
texBuffer.position(0);
}
// Draw the shape
public void draw(GL10 gl) {
gl.glFrontFace(GL10.GL_CCW); // Front face in counter-clockwise orientation
gl.glEnable(GL10.GL_CULL_FACE); // Enable cull face
gl.glCullFace(GL10.GL_BACK); // Cull the back face (don't display)
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY); // Enable texture-coords-array (NEW)
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, texBuffer); // Define texture-coords buffer (NEW)
// front
gl.glPushMatrix();
gl.glTranslatef(0.0f, 0.0f, 1.0f);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
gl.glPopMatrix();
// left
gl.glPushMatrix();
gl.glRotatef(270.0f, 0.0f, 1.0f, 0.0f);
gl.glTranslatef(0.0f, 0.0f, 1.0f);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
gl.glPopMatrix();
// back
gl.glPushMatrix();
gl.glRotatef(180.0f, 0.0f, 1.0f, 0.0f);
gl.glTranslatef(0.0f, 0.0f, 1.0f);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
gl.glPopMatrix();
// right
gl.glPushMatrix();
gl.glRotatef(90.0f, 0.0f, 1.0f, 0.0f);
gl.glTranslatef(0.0f, 0.0f, 1.0f);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
gl.glPopMatrix();
// top
gl.glPushMatrix();
gl.glRotatef(270.0f, 1.0f, 0.0f, 0.0f);
gl.glTranslatef(0.0f, 0.0f, 1.0f);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
gl.glPopMatrix();
// bottom
gl.glPushMatrix();
gl.glRotatef(90.0f, 1.0f, 0.0f, 0.0f);
gl.glTranslatef(0.0f, 0.0f, 1.0f);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
gl.glPopMatrix();
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY); // Disable texture-coords-array
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisable(GL10.GL_CULL_FACE);
}
// Load an image into GL texture
public void loadTexture(GL10 gl) {
gl.glGenTextures(6, textureIDs, 0); // Generate texture-ID array for 6 IDs
// Generate OpenGL texture images
for (int face = 0; face < numFaces; face++) {
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureIDs[face]);
// Build Texture from loaded bitmap for the currently bound texture ID
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap[face], 0);
bitmap[face].recycle();
}
}
}
and finally my activity Welcome.java
public class Welcome extends AppCompatActivity {
LinearLayout l1,l2;
public Button btnsub;
Animation uptodown,downtoup;
@Override
protected void onCreate(Bundle savedInstanceState) {
requestWindowFeature(Window.FEATURE_NO_TITLE);
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_welcome);
FrameLayout openGLLayout = (FrameLayout) findViewById(R.id.frameLayout1);
//creates an openGL surface and renders it to the framelayout in the activity layout
OpenGLRenderer gl3DView = new OpenGLRenderer(this, openGLLayout);
btnsub = (Button)findViewById(R.id.buttonsub);
btnsub.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent test = new Intent(Welcome.this,DiceGame.class);
startActivity(test);
}
});
l1 = (LinearLayout) findViewById(R.id.l1);
l2 = (LinearLayout) findViewById(R.id.l2);
uptodown = AnimationUtils.loadAnimation(this,R.anim.uptodown);
downtoup = AnimationUtils.loadAnimation(this,R.anim.downtoup);
l1.setAnimation(uptodown);
l2.setAnimation(downtoup);
}
}
In terms of performance:
1) Learn OpenGL ES 2.0 and use shaders; it's a much cleaner API with a lot less messing about needed in the application and/or in the driver stack. All of the new APIs are shader-based, so this is a good thing to learn. I wouldn't consider using OpenGL ES 1.x for any new projects, as it's a dead end from a technology point of view.
2) Once you're using OpenGL ES 2.0 or newer, use buffer objects for storing vertex data, and upload the data at the start of the application, not per frame. It shouldn't really matter in this case (the cube is very simple), but it's a good habit to get into.
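As a sketch of point 2 (the helper name is illustrative; the GLES20 calls are the standard Android API): create and fill the buffer object once, e.g. from onSurfaceCreated(), and keep the returned id around for drawing.
// One-time upload of vertex data into a VBO (OpenGL ES 2.0, android.opengl.GLES20).
// vertexData is a direct, native-ordered FloatBuffer like the one Dice3D builds.
private int createVertexBuffer(java.nio.FloatBuffer vertexData) {
    int[] buffers = new int[1];
    GLES20.glGenBuffers(1, buffers, 0);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, buffers[0]);
    vertexData.position(0);
    GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER,
            vertexData.capacity() * 4,      // size in bytes (4 bytes per float)
            vertexData,
            GLES20.GL_STATIC_DRAW);         // the cube geometry never changes
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
    return buffers[0];
}
At draw time you would then bind that buffer id and point the vertex attribute at an offset into it, instead of handing OpenGL the client-side FloatBuffer on every frame.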

Lighting a textured object in OpenGL 2.0+

I’ve been developing a cube program that provides a number of cubes with desired qualities. However, whenever I try to light a textured cube, my cube becomes very dark. The lighting works well with a non-textured cube so I’m led to believe it’s done properly just as a simple textured cube without lighting works. There doesn’t seem to be significant documentation on how to solve this in OpenGL 2.0+ but there are a few things pertaining to older versions.
The following link offers information as to why my cube is behaving this way, but I'm having trouble translating the solution to a newer version, especially within my shader code, where I'm unsure whether further changes are needed. I am using Android Studio 2.1.3, in case that IDE and its bundled emulators could affect the desired result. If anyone could offer any advice, I'd greatly appreciate it. I have a separate (large) renderer that calls for the Cube to be drawn; let me know if that code would be helpful in addition to my Cube. Below is my Cube:
public class TexturedLightCube {
/** Cube vertices */
private static final float VERTICES[] = {
-0.3f, -0.3f, -0.3f, //top front right
0.3f, -0.3f, -0.3f, //bottom front right
0.3f, 0.3f, -0.3f, //bottom front left
-0.3f, 0.3f, -0.3f, //top front left
-0.3f, -0.3f, 0.3f, //top back right
0.3f, -0.3f, 0.3f, //bottom back right
0.3f, 0.3f, 0.3f, //bottom back left
-0.3f, 0.3f, 0.3f // top back left
};
/** Vertex colors. */
private static final float COLORS[] = {
0.0f, 1.0f, 1.0f, 1.0f,
1.0f, 0.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f, 1.0f,
0.0f, 1.0f, 0.0f, 1.0f,
0.0f, 0.0f, 1.0f, 1.0f,
1.0f, 0.0f, 1.0f, 1.0f,
1.0f, 1.0f, 1.0f, 1.0f,
0.0f, 1.0f, 1.0f, 1.0f,
};
/** Order to draw vertices as triangles. */
private static final byte INDICES[] = {
0, 1, 3, 3, 1, 2, // Front face.
0, 1, 4, 4, 5, 1, // Bottom face.
1, 2, 5, 5, 6, 2, // Right face.
2, 3, 6, 6, 7, 3, // Top face.
3, 7, 4, 4, 3, 0, // Left face.
4, 5, 7, 7, 6, 5, // Rear face.
};
private static final float TEXTURECOORDS[] =
{
0.0f, 1.0f, //left-bottom
0.0f, 0.0f, //right bottom
1.0f, 0.0f, //left top
1.0f, 1.0f, //right top
0.0f, 1.0f, //left-bottom
0.0f, 0.0f, //right bottom
1.0f, 0.0f, //left top
1.0f, 1.0f, //right top
};
private static final float NORMALS[] = {
//set all normals to all light for testing
1.0f, 1.0f, 1.0f, //top front right
1.0f, 0.0f, 1.0f, //bottom front right
0.0f, 0.0f, 1.0f, //bottom front left
0.0f, 1.0f, 1.0f, //top front left
1.0f, 1.0f, 0.0f, //top back right
1.0f, 0.0f, 0.0f, //bottom back right
0.0f, 0.0f, 0.0f, //bottom back left
0.0f, 1.0f, 0.0f //top back left
};
static final int COORDS_PER_VERTEX = 3;
private static final int VALUES_PER_COLOR = 4;
/** Vertex size in bytes. */
final int VERTEX_STRIDE = COORDS_PER_VERTEX * 4;
/** Color size in bytes. */
private final int COLOR_STRIDE = VALUES_PER_COLOR * 4;
/** Shader code for the vertex. */
private static final String VERTEX_SHADER_CODE =
"uniform mat4 uMVPMatrix;" +
"uniform mat4 uMVMatrix;" +
"uniform vec3 u_LightPos;" +
"attribute vec4 vPosition;" +
"attribute vec4 a_Color;" +
"attribute vec3 a_Normal;" +
"varying vec4 v_Color;" +
"attribute vec2 a_TexCoordinate;" +
"varying vec2 v_TexCoordinate;" +
"void main() {" +
"vec3 modelViewVertex = vec3(uMVMatrix * vPosition);"+
"vec3 modelViewNormal = vec3(uMVMatrix * vec4(a_Normal, 0.0));" +
"float distance = length(u_LightPos - modelViewVertex);" +
"vec3 lightVector = normalize(u_LightPos - modelViewVertex);" +
"float diffuse = max(dot(modelViewNormal, lightVector), 0.1);" +
"diffuse = diffuse * (1.0/(1.0 + (0.00000000000002 * distance * distance)));" + //attenuation factor
"v_Color = a_Color * a_Color * diffuse;" +
"gl_Position = uMVPMatrix * vPosition;" +
"v_TexCoordinate = a_TexCoordinate;" +
"}";
/** Shader code for the fragment. */
private static final String FRAGMENT_SHADER_CODE =
"precision mediump float;" +
"varying vec4 v_Color;" +
"uniform sampler2D u_Texture;"+ //The input texture
"varying vec2 v_TexCoordinate;" +
"void main() {" +
" gl_FragColor = v_Color * texture2D(u_Texture, v_TexCoordinate) ;" + //still works with just color
"}";
private int mTextureUniformHandle; //Pass in texture.
private int mTextureCoordinateHandle; //Pass in model texture coordinate information.
private final int mTextureCoordinateDataSize = 2; //Size of texture coordinate data in elements
public static int mTextureDataHandle; //Handle to texturedata;
private final FloatBuffer mTextureBuffer; //Store model data in float buffer.
private final FloatBuffer mVertexBuffer;
private final FloatBuffer mColorBuffer;
private final FloatBuffer mNormalBuffer;
private final ByteBuffer mIndexBuffer;
private final int mProgram;
private final int mPositionHandle;
private final int mColorHandle;
private final int mMVPMatrixHandle;
private final int mNormalHandle;
public static int mLightPosHandle;
public final int mMVMatrixHandle;
public static int loadTexture(final Context context, final int resourceId) {
//Get the texture from the Android resource directory
final int[] textureHandle = new int[1];
InputStream is = context.getResources().openRawResource(resourceId); // use the passed-in resource id
Bitmap bitmap = null;
try {
//BitmapFactory is an Android graphics utility for images
bitmap = BitmapFactory.decodeStream(is);
} finally {
//Always clear and close
try {
is.close();
is = null;
} catch (IOException e) {
}
}
//Generate one texture pointer...
GLES20.glGenTextures(1, textureHandle, 0);
//and bind it to our array.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
//Create Nearest Filtered Texture.
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
//Accounting for different texture parameters.
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
//Use the Android GLUtils to specify a two-dimensional texture image from our bitmap.
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
//Clean up
bitmap.recycle();
if (textureHandle[0] == 0)
{
throw new RuntimeException("Error loading texture");
}
return textureHandle[0];
}
public TexturedLightCube() {
ByteBuffer byteBuffer = ByteBuffer.allocateDirect(VERTICES.length * 4);
byteBuffer.order(ByteOrder.nativeOrder());
mVertexBuffer = byteBuffer.asFloatBuffer();
mVertexBuffer.put(VERTICES);
mVertexBuffer.position(0);
byteBuffer = ByteBuffer.allocateDirect(COLORS.length * 4);
byteBuffer.order(ByteOrder.nativeOrder());
mColorBuffer = byteBuffer.asFloatBuffer();
mColorBuffer.put(COLORS);
mColorBuffer.position(0);
byteBuffer = ByteBuffer.allocateDirect(NORMALS.length * 4);
byteBuffer.order(ByteOrder.nativeOrder());
mNormalBuffer = byteBuffer.asFloatBuffer();
mNormalBuffer.put(NORMALS);
mNormalBuffer.position(0);
byteBuffer = ByteBuffer.allocateDirect(TEXTURECOORDS.length * 4);
byteBuffer.order(ByteOrder.nativeOrder());
mTextureBuffer = byteBuffer.asFloatBuffer();
mTextureBuffer.put(TEXTURECOORDS);
mTextureBuffer.position(0);
mIndexBuffer = ByteBuffer.allocateDirect(INDICES.length);
mIndexBuffer.put(INDICES);
mIndexBuffer.position(0);
mProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(mProgram, loadShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_CODE));
GLES20.glAttachShader(mProgram, loadShader(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_CODE));
GLES20.glLinkProgram(mProgram);
mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgram, "a_TexCoordinate");
mTextureUniformHandle = GLES20.glGetUniformLocation(mProgram, "u_Texture");
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
mMVMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVMatrix");
mLightPosHandle = GLES20.glGetUniformLocation(mProgram, "u_LightPos");
mNormalHandle = GLES20.glGetAttribLocation(mProgram, "a_Normal");
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
mColorHandle = GLES20.glGetAttribLocation(mProgram, "a_Color");
}
/**
* Encapsulates the OpenGL ES instructions for drawing this shape.
*
* @param mvpMatrix The Model View Projection matrix in which to draw this shape
*/
public void draw(float[] mvpMatrix) {
// Add program to OpenGL environment.
GLES20.glUseProgram(mProgram);
//set active texture unit to texture unit 0.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
// Prepare the cube coordinate data.
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, VERTEX_STRIDE, mVertexBuffer);
// Prepare the cube color data.
GLES20.glEnableVertexAttribArray(mColorHandle);
GLES20.glVertexAttribPointer(mColorHandle, 4, GLES20.GL_FLOAT, false, COLOR_STRIDE, mColorBuffer);
//Will have the same size as Vertex as we are implementing per vertex lighting
GLES20.glEnableVertexAttribArray(mNormalHandle);
GLES20.glVertexAttribPointer(mNormalHandle, 3, GLES20.GL_FLOAT, false, VERTEX_STRIDE, mNormalBuffer);
// Prepare the cube texture data.
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
//Pass texture coordinate information.
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false, 0, mTextureBuffer); // 2 floats per vertex, tightly packed
// Apply the projection and view transformation.
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
GLES20.glUniform3f(mLightPosHandle, MyGLRenderer.mLightPosInEyeSpace[0], MyGLRenderer.mLightPosInEyeSpace[1], MyGLRenderer.mLightPosInEyeSpace[2]);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glUniform1i(mTextureUniformHandle, 0);
// Draw the cube.
GLES20.glDrawElements(GLES20.GL_TRIANGLES, INDICES.length, GLES20.GL_UNSIGNED_BYTE, mIndexBuffer); //-removed indices-
// Disable vertex arrays.
GLES20.glDisableVertexAttribArray(mPositionHandle);
GLES20.glDisableVertexAttribArray(mTextureCoordinateHandle);
GLES20.glDisableVertexAttribArray(mColorHandle);
GLES20.glDisableVertexAttribArray(mNormalHandle);
}
/** Loads the provided shader in the program. */
private static int loadShader(int type, String shaderCode){
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
You're missing an ambient component in your lighting, which emulates the second-order (and higher) reflections you would get in real life but can't get directly in a rasterizer.
I'm not sure why you are squaring a_Color in your vertex shader. This will definitely make things darker, because all values are between 0 and 1; e.g. 0.1^2 == 0.01.
Remember that your dot product might be negative, so you want to clamp out negative diffuse components (i.e. no light intensity on surfaces which are facing away from the light).
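Putting those three points together, a sketch of what the corrected vertex shader string might look like, keeping the question's uniform and attribute names (the 0.2 ambient term is an illustrative value, not taken from the original code):
/** Sketch only: the question's vertex shader with the three fixes applied. */
private static final String VERTEX_SHADER_CODE =
    "uniform mat4 uMVPMatrix;" +
    "uniform mat4 uMVMatrix;" +
    "uniform vec3 u_LightPos;" +
    "attribute vec4 vPosition;" +
    "attribute vec4 a_Color;" +
    "attribute vec3 a_Normal;" +
    "attribute vec2 a_TexCoordinate;" +
    "varying vec4 v_Color;" +
    "varying vec2 v_TexCoordinate;" +
    "void main() {" +
    "  vec3 modelViewVertex = vec3(uMVMatrix * vPosition);" +
    "  vec3 modelViewNormal = vec3(uMVMatrix * vec4(a_Normal, 0.0));" +
    "  vec3 lightVector = normalize(u_LightPos - modelViewVertex);" +
    // clamp at 0.0: no diffuse light on faces pointing away from the light
    "  float diffuse = max(dot(modelViewNormal, lightVector), 0.0);" +
    // a small constant ambient term stands in for bounced light
    "  float ambient = 0.2;" +
    // a_Color is used once, not squared
    "  v_Color = a_Color * min(diffuse + ambient, 1.0);" +
    "  v_TexCoordinate = a_TexCoordinate;" +
    "  gl_Position = uMVPMatrix * vPosition;" +
    "}";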

Applying a map-of-the-earth texture to a Sphere

I've been trying to implement a 3D animation of a solar system in OpenGL (using JOGL). So far I have 5 planets of different sizes, but the problem I seem to be having is that I can't add a map-of-the-earth texture to a sphere. Can anybody help me with how it's done?
This is the code I have so far in my display method:
@Override
public void display(GLAutoDrawable drawable) {
GL2 gl = drawable.getGL().getGL2();
GLU glu = new GLU();
gl.glClear(GL.GL_COLOR_BUFFER_BIT);
//make sure we are in model_view mode
gl.glMatrixMode(GL2.GL_MODELVIEW);
gl.glLoadIdentity();
glu.gluLookAt(10,20,20,0,3,0,0, 20, 0);
//gl.glMatrixMode(GL2.GL_PROJECTION);
//glu.gluPerspective(45,1,1,25);
//render ground plane
gl.glPushMatrix();
gl.glTranslatef(-10.75f, 3.0f, -1.0f);
gl.glColor3f(0.3f, 0.5f, 1f);
GLUquadric earth = glu.gluNewQuadric();
glu.gluQuadricDrawStyle(earth, GLU.GLU_FILL);
glu.gluQuadricNormals(earth, GLU.GLU_FLAT);
glu.gluQuadricOrientation(earth, GLU.GLU_OUTSIDE);
final float radius = 3.378f;
final int slices = 89;
final int stacks = 16;
glu.gluSphere(earth, radius, slices, stacks);
glu.gluDeleteQuadric(earth);
Texture earths;
try {
earths = TextureIO.newTexture(new File("earth.png"), true);
}
catch (IOException e) {
javax.swing.JOptionPane.showMessageDialog(null, e);
}
gl.glPopMatrix();
//gl.glEnd();
gl.glPushMatrix();
gl.glTranslatef(2.75f, 3.0f, -0.0f);
gl.glColor3f(0.3f, 0.5f, 1f);
GLUquadric earth1 = glu.gluNewQuadric();
glu.gluQuadricDrawStyle(earth1, GLU.GLU_FILL);
glu.gluQuadricNormals(earth1, GLU.GLU_FLAT);
glu.gluQuadricOrientation(earth1, GLU.GLU_OUTSIDE);
final float radius1 = 3.378f;
final int slices1 = 90;
final int stacks1 = 63;
glu.gluSphere(earth1, radius1, slices1, stacks1);
glu.gluDeleteQuadric(earth1);
gl.glPopMatrix();
gl.glPushMatrix();
gl.glTranslatef(3.75f, 6.0f, -7.20f);
gl.glColor3f(0.3f, 0.5f, 1f);
GLUquadric earth3 = glu.gluNewQuadric();
glu.gluQuadricDrawStyle(earth3, GLU.GLU_FILL);
glu.gluQuadricNormals(earth3, GLU.GLU_FLAT);
glu.gluQuadricOrientation(earth1, GLU.GLU_OUTSIDE);
final float radius3 = 1.878f;
final int slices3 = 89;
final int stacks3 = 16;
glu.gluSphere(earth3, radius3, slices3, stacks3);
glu.gluDeleteQuadric(earth3);
gl.glPopMatrix();
gl.glPushMatrix();
gl.glTranslatef(12.75f, 2.0f, -7.20f);
gl.glColor3f(0.3f, 0.5f, 1f);
GLUquadric earth4 = glu.gluNewQuadric();
glu.gluQuadricDrawStyle(earth4, GLU.GLU_FILL);
glu.gluQuadricNormals(earth4, GLU.GLU_FLAT);
glu.gluQuadricOrientation(earth4, GLU.GLU_OUTSIDE);
final float radius4 = 1.078f;
final int slices4 = 89;
final int stacks4 = 16;
glu.gluSphere(earth4, radius4, slices4, stacks4);
glu.gluDeleteQuadric(earth4);
gl.glPopMatrix();
gl.glPushMatrix();
gl.glTranslatef(2.75f, -6.0f, -0.0f);
gl.glColor3f(0.3f, 0.5f, 1f);
GLUquadric earth5 = glu.gluNewQuadric();
glu.gluQuadricDrawStyle(earth5, GLU.GLU_FILL);
glu.gluQuadricNormals(earth5, GLU.GLU_FLAT);
glu.gluQuadricOrientation(earth5, GLU.GLU_OUTSIDE);
final float radius5 = 3.778f;
final int slices5 = 90;
final int stacks5 = 63;
glu.gluSphere(earth5, radius5, slices5, stacks5);
glu.gluDeleteQuadric(earth5);
gl.glPopMatrix();
}
create your own sphere mesh
a simple 2D loop through 2 angles (spherical coordinates to Cartesian). You can easily add ellipsoid properties (the Earth is not a sphere) if you want more precision. If not, you can use a single sphere mesh for all planets and just scale it before use ...
let a be the longitude and b the latitude, so loop a from 0 to 2*PI [rad] and b from -0.5*PI to +0.5*PI [rad], where PI=3.1415... (in C++ math.h it is called M_PI). If your math API uses degrees then convert: PI [rad] = 180.0 [deg]
add necessary info per vertex
normals for lighting
// just unit sphere
nx=cos(b)*cos(a);
ny=cos(b)*sin(a);
nz=sin(b);
texture coordinate (assuming a rectangular, non-distorted image)
// just convert a,b to <0,1> range
tx=a/(2.0*PI);
ty=(b/PI)+0.5;
vertex position
// just sphere(rx=ry=rz=r) or ellipsoid (rx=ry=equatorial and rz=polar radius)
// can also use rx*nx,ry*ny,rz*nz instead ...
x=rx*cos(b)*cos(a);
y=ry*cos(b)*sin(a);
z=rz*sin(b);
send all of this to OpenGL
store all of the above in some memory space (CPU or GPU) and then send it to rendering. You can use legacy glBegin(GL_QUAD_STRIP); ... glEnd(); or a display list/VBO/VAO. Bind the right texture before each planet/body, and do not forget to update the ModelView matrix too.
Also have a look at these related Q/As:
realistic n-body solar system
sphere mesh by subdivision
[edit1] C++ example
//---------------------------------------------------------------------------
const int nb=15; // slices
const int na=nb<<1; // points per equator
class planet
{
public:
bool _init; // has been initiated ?
GLfloat x0,y0,z0; // center of planet [GCS]
GLfloat pos[na][nb][3]; // vertex
GLfloat nor[na][nb][3]; // normal
GLfloat txr[na][nb][2]; // texcoord
GLuint txrid; // texture id
GLfloat t; // daily rotation angle [deg]
planet() { _init=false; txrid=0; x0=0.0; y0=0.0; z0=0.0; t=0.0; }
~planet() { if (_init) glDeleteTextures(1,&txrid); }
void init(GLfloat r,AnsiString texture); // call after OpenGL is already working !!!
void draw();
};
void planet::init(GLfloat r,AnsiString texture)
{
if (!_init) { _init=true; glGenTextures(1,&txrid); }
GLfloat x,y,z,a,b,da,db;
GLfloat tx0,tdx,ty0,tdy;// just correction if CLAMP_TO_EDGE is not available
int ia,ib;
// a,b to texture coordinate system
tx0=0.0;
ty0=0.5;
tdx=0.5/M_PI;
tdy=1.0/M_PI;
// load texture to GPU memory
if (texture!="")
{
Byte q;
unsigned int *pp;
int xs,ys,x,y,adr,*txr;
union { unsigned int c32; Byte db[4]; } c;
Graphics::TBitmap *bmp=new Graphics::TBitmap; // new bmp
bmp->LoadFromFile(texture); // load from file
bmp->HandleType=bmDIB; // allow direct access to pixels
bmp->PixelFormat=pf32bit; // set pixel to 32bit so int is the same size as pixel
xs=bmp->Width; // resolution should be power of 2
ys=bmp->Height;
txr=new int[xs*ys];
for(adr=0,y=0;y<ys;y++)
{
pp=(unsigned int*)bmp->ScanLine[y];
for(x=0;x<xs;x++,adr++)
{
// rgb2bgr and copy bmp -> txr[]
c.c32=pp[x];
q =c.db[2];
c.db[2]=c.db[0];
c.db[0]=q;
txr[adr]=c.c32;
}
}
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D,txrid);
glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S,GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T,GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE,GL_MODULATE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, xs, ys, 0, GL_RGBA, GL_UNSIGNED_BYTE, txr);
glDisable(GL_TEXTURE_2D);
delete bmp;
delete[] txr;
// texture coordinates by 1 pixel from each edge (GL_CLAMP_TO_EDGE)
tx0+=1.0/GLfloat(xs);
ty0+=1.0/GLfloat(ys);
tdx*=GLfloat(xs-2)/GLfloat(xs);
tdy*=GLfloat(ys-2)/GLfloat(ys);
}
// correct texture coordinate system (invert x)
tx0=1.0-tx0; tdx=-tdx;
da=(2.0*M_PI)/GLfloat(na-1);
db= M_PI /GLfloat(nb-1);
for (ib=0,b=-0.5*M_PI;ib<nb;ib++,b+=db)
for (ia=0,a= 0.0 ;ia<na;ia++,a+=da)
{
x=cos(b)*cos(a);
y=cos(b)*sin(a);
z=sin(b);
nor[ia][ib][0]=x;
nor[ia][ib][1]=y;
nor[ia][ib][2]=z;
pos[ia][ib][0]=r*x;
pos[ia][ib][1]=r*y;
pos[ia][ib][2]=r*z;
txr[ia][ib][0]=tx0+(a*tdx);
txr[ia][ib][1]=ty0+(b*tdy);
}
}
void planet::draw()
{
if (!_init) return;
int ia,ib0,ib1;
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glLoadIdentity();
glTranslatef(x0,y0,z0);
glRotatef(90.0,1.0,0.0,0.0); // rotate planets z axis (North) to OpenGL y axis (Up)
glRotatef(-t,0.0,0.0,1.0); // rotate planets z axis (North) to OpenGL y axis (Up)
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D,txrid);
glColor3f(1.0,1.0,1.0);
for (ib0=0,ib1=1;ib1<nb;ib0=ib1,ib1++)
{
glBegin(GL_QUAD_STRIP);
for (ia=0;ia<na;ia++)
{
glNormal3fv (nor[ia][ib0]);
glTexCoord2fv(txr[ia][ib0]);
glVertex3fv (pos[ia][ib0]);
glNormal3fv (nor[ia][ib1]);
glTexCoord2fv(txr[ia][ib1]);
glVertex3fv (pos[ia][ib1]);
}
glEnd();
}
glDisable(GL_TEXTURE_2D);
glMatrixMode(GL_MODELVIEW);
glPopMatrix();
}
//---------------------------------------------------------------------------
usage:
// variable to store planet (global)
planet earth;
// init after OpenGL initialisation
earth.init(1.0,"earth.bmp");
// position update
earth.x0= 0.0;
earth.y0= 0.0;
earth.z0=-20.0;
// add this to render loop
earth.draw(); // draws the planet
earth.t+=2.5; // just rotate planet by 2.5 deg each frame...
I know it's ugly, but it does not use any funny stuff, just legacy OpenGL and math.h (cos(), sin(), M_PI), plus VCL for the bitmap loading. So rewrite it for your environment and you will be fine. Do not forget that each planet has its own texture, so you need one txrid per planet: either have each planet as a separate planet variable, or rewrite ...
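Since the question itself uses JOGL, here is a rough Java sketch of the same mesh-generation and drawing loops against the javax.media.opengl API from the question (untested; the array layout mirrors the C++ above, and the texture-edge correction is omitted for brevity):
// Build-once sphere grid: NA points around the equator, NB latitude rings.
static final int NB = 15, NA = 2 * NB;
float[][][] pos = new float[NA][NB][3]; // vertex positions
float[][][] nor = new float[NA][NB][3]; // normals (unit sphere)
float[][][] txr = new float[NA][NB][2]; // texture coordinates

void buildSphere(float r) {
    double da = 2.0 * Math.PI / (NA - 1);
    double db = Math.PI / (NB - 1);
    for (int ib = 0; ib < NB; ib++) {
        double b = -0.5 * Math.PI + ib * db;             // latitude
        for (int ia = 0; ia < NA; ia++) {
            double a = ia * da;                           // longitude
            float nx = (float) (Math.cos(b) * Math.cos(a));
            float ny = (float) (Math.cos(b) * Math.sin(a));
            float nz = (float) Math.sin(b);
            nor[ia][ib][0] = nx; nor[ia][ib][1] = ny; nor[ia][ib][2] = nz;
            pos[ia][ib][0] = r * nx; pos[ia][ib][1] = r * ny; pos[ia][ib][2] = r * nz;
            txr[ia][ib][0] = (float) (a / (2.0 * Math.PI));
            txr[ia][ib][1] = (float) (b / Math.PI + 0.5);
        }
    }
}

void drawSphere(GL gl) { // bind the planet's texture before calling this
    for (int ib = 0; ib + 1 < NB; ib++) {
        gl.glBegin(GL.GL_QUAD_STRIP);
        for (int ia = 0; ia < NA; ia++) {
            gl.glNormal3fv(nor[ia][ib], 0);
            gl.glTexCoord2fv(txr[ia][ib], 0);
            gl.glVertex3fv(pos[ia][ib], 0);
            gl.glNormal3fv(nor[ia][ib + 1], 0);
            gl.glTexCoord2fv(txr[ia][ib + 1], 0);
            gl.glVertex3fv(pos[ia][ib + 1], 0);
        }
        gl.glEnd();
    }
}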

unable to render with array buffers in Java with LWJGL

I'm attempting to learn how to program in OpenGL the modern way, using vertex array/vertex buffer objects. I'm using the tutorials on the LWJGL wiki right now, and even if I copy & paste the tutorial code, I get a window with the background colour set properly but no shape rendered on top of it. The tutorial page shows a screenshot with a white rectangle rendered over the background. Is this a common issue, or is there any way I can get further information on my error?
Edit: using shaders and putting some colour on the vertices fixes the problem. I'm not posting this as an answer quite yet, though, because I'm assuming the tutorial code was intended to work without the use of shaders (which are covered in a later portion of the tutorial).
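For reference, a minimal pass-through shader pair of the kind that makes the quad visible under the 3.2 core profile might look like this (sources and names are illustrative, not from the tutorial); call GL20.glUseProgram(programId) before glDrawArrays in loopCycle():
// Minimal GLSL 150 pass-through shaders for the core profile context.
private static final String VERTEX_SRC =
    "#version 150 core\n" +
    "in vec3 in_Position;\n" +
    "void main() { gl_Position = vec4(in_Position, 1.0); }\n";
private static final String FRAGMENT_SRC =
    "#version 150 core\n" +
    "out vec4 out_Color;\n" +
    "void main() { out_Color = vec4(1.0); }\n"; // opaque white

private int createProgram() {
    int vs = GL20.glCreateShader(GL20.GL_VERTEX_SHADER);
    GL20.glShaderSource(vs, VERTEX_SRC);
    GL20.glCompileShader(vs);
    int fs = GL20.glCreateShader(GL20.GL_FRAGMENT_SHADER);
    GL20.glShaderSource(fs, FRAGMENT_SRC);
    GL20.glCompileShader(fs);
    int programId = GL20.glCreateProgram();
    GL20.glAttachShader(programId, vs);
    GL20.glAttachShader(programId, fs);
    GL20.glBindAttribLocation(programId, 0, "in_Position"); // attribute list index 0
    GL20.glLinkProgram(programId);
    return programId;
}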
This is the code on the tutorial page:
import java.nio.FloatBuffer;
import org.lwjgl.BufferUtils;
import org.lwjgl.LWJGLException;
import org.lwjgl.opengl.ContextAttribs;
import org.lwjgl.opengl.Display;
import org.lwjgl.opengl.DisplayMode;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL15;
import org.lwjgl.opengl.GL20;
import org.lwjgl.opengl.GL30;
import org.lwjgl.opengl.PixelFormat;
import org.lwjgl.util.glu.GLU;
public class TheQuadExampleDrawArrays {
// Entry point for the application
public static void main(String[] args) {
new TheQuadExampleDrawArrays();
}
// Setup variables
private final String WINDOW_TITLE = "The Quad: glDrawArrays";
private final int WIDTH = 320;
private final int HEIGHT = 240;
// Quad variables
private int vaoId = 0;
private int vboId = 0;
private int vertexCount = 0;
public TheQuadExampleDrawArrays() {
// Initialize OpenGL (Display)
this.setupOpenGL();
this.setupQuad();
while (!Display.isCloseRequested()) {
// Do a single loop (logic/render)
this.loopCycle();
// Force a maximum FPS of about 60
Display.sync(60);
// Let the CPU synchronize with the GPU if the GPU is lagging behind
Display.update();
}
// Destroy OpenGL (Display)
this.destroyOpenGL();
}
public void setupOpenGL() {
// Setup an OpenGL context with API version 3.2
try {
PixelFormat pixelFormat = new PixelFormat();
ContextAttribs contextAttributes = new ContextAttribs(3, 2)
.withForwardCompatible(true)
.withProfileCore(true);
Display.setDisplayMode(new DisplayMode(WIDTH, HEIGHT));
Display.setTitle(WINDOW_TITLE);
Display.create(pixelFormat, contextAttributes);
GL11.glViewport(0, 0, WIDTH, HEIGHT);
} catch (LWJGLException e) {
e.printStackTrace();
System.exit(-1);
}
// Setup an XNA like background color
GL11.glClearColor(0.4f, 0.6f, 0.9f, 0f);
// Map the internal OpenGL coordinate system to the entire screen
GL11.glViewport(0, 0, WIDTH, HEIGHT);
this.exitOnGLError("Error in setupOpenGL");
}
public void setupQuad() {
// OpenGL expects vertices to be defined counter clockwise by default
float[] vertices = {
// Left bottom triangle
-0.5f, 0.5f, 0f,
-0.5f, -0.5f, 0f,
0.5f, -0.5f, 0f,
// Right top triangle
0.5f, -0.5f, 0f,
0.5f, 0.5f, 0f,
-0.5f, 0.5f, 0f
};
// Sending data to OpenGL requires the usage of (flipped) byte buffers
FloatBuffer verticesBuffer = BufferUtils.createFloatBuffer(vertices.length);
verticesBuffer.put(vertices);
verticesBuffer.flip();
vertexCount = 6;
// Create a new Vertex Array Object in memory and select it (bind)
// A VAO can have up to 16 attributes (VBO's) assigned to it by default
vaoId = GL30.glGenVertexArrays();
GL30.glBindVertexArray(vaoId);
// Create a new Vertex Buffer Object in memory and select it (bind)
// A VBO is a collection of vectors which in this case represent the location of each vertex.
vboId = GL15.glGenBuffers();
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vboId);
GL15.glBufferData(GL15.GL_ARRAY_BUFFER, verticesBuffer, GL15.GL_STATIC_DRAW);
// Put the VBO in the attributes list at index 0
GL20.glVertexAttribPointer(0, 3, GL11.GL_FLOAT, false, 0, 0);
// Deselect (bind to 0) the VBO
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
// Deselect (bind to 0) the VAO
GL30.glBindVertexArray(0);
this.exitOnGLError("Error in setupQuad");
}
public void loopCycle() {
GL11.glClear(GL11.GL_COLOR_BUFFER_BIT);
// Bind to the VAO that has all the information about the quad vertices
GL30.glBindVertexArray(vaoId);
GL20.glEnableVertexAttribArray(0);
// Draw the vertices
GL11.glDrawArrays(GL11.GL_TRIANGLES, 0, vertexCount);
// Put everything back to default (deselect)
GL20.glDisableVertexAttribArray(0);
GL30.glBindVertexArray(0);
this.exitOnGLError("Error in loopCycle");
}
public void destroyOpenGL() {
// Disable the VBO index from the VAO attributes list
GL20.glDisableVertexAttribArray(0);
// Delete the VBO
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
GL15.glDeleteBuffers(vboId);
// Delete the VAO
GL30.glBindVertexArray(0);
GL30.glDeleteVertexArrays(vaoId);
Display.destroy();
}
public void exitOnGLError(String errorMessage) {
int errorValue = GL11.glGetError();
if (errorValue != GL11.GL_NO_ERROR) {
String errorString = GLU.gluErrorString(errorValue);
System.err.println("ERROR - " + errorMessage + ": " + errorString);
if (Display.isCreated()) Display.destroy();
System.exit(-1);
}
}
}
This is a late answer, but since the Z coordinate is 0 on all of your vertices, wouldn't the quad sit exactly on the near clipping plane and thus not render because it is out of range? Try moving the vertices back along the Z axis; it should then theoretically render.

Android getOrientation() method returns bad results

I'm creating 3D Compass application.
I'm using the getOrientation method to get the orientation (almost the same implementation as here). If I place the phone on the table it works well, but when the top of the phone points to the sky (minus Z axis in the picture; the sphere is the Earth), getOrientation starts giving really bad results. It swings between 0 and 180 degrees on the Z axis within a few real degrees of movement. Is there any way to suppress this behavior? I created a little video that shows the problem (sorry for the bad quality). Thanks in advance.
Solution:
When rotating the model, there is a difference between:
gl.glRotatef(_angleY, 0f, 1f, 0f); //ROLL
gl.glRotatef(_angleX, 1f, 0f, 0f); //ELEVATION
gl.glRotatef(_angleZ, 0f, 0f, 1f); //AZIMUTH
and:
gl.glRotatef(_angleX, 1f, 0f, 0f); //ELEVATION
gl.glRotatef(_angleY, 0f, 1f, 0f); //ROLL
gl.glRotatef(_angleZ, 0f, 0f, 1f); //AZIMUTH
Well, I can see at least 1 problem with this approach of yours.
I assume that you run the 3D vector coming from your magnetometer through an averaging low-pass filter to smooth the data. Although such an approach works great for a sensor value which varies without discontinuities, such as raw data from the accelerometer, it doesn't work so great verbatim with the angular variables derived from your magnetometer. Why, one might ask?
Because those angular variables (azimuth, pitch, roll) have an upper-bound and a lower-bound, which means that any value above 180 degrees, say 181 degrees, would wrap around to 181-360 = -179 degrees, and any variable below -180 degrees would wrap around in the other direction. So when one of those angular variables get close to those thresholds (180 or -180), this variable will tend to oscillate to values close to those 2 extremes. When you blindly apply a low-pass filter to those values, you get either a smooth decreasing from 180 degrees towards -180 degrees, or a smooth increasing from -180 towards 180 degrees. Either way, the result would look quite like your video above... As long as one directly applies an averaging buffer onto the raw angle data from getOrientation(...), this problem will be present (and should be present not only for the case where the phone is upright, but also in the cases where there are azimuth angle wraparounds too... Maybe you could test for those bugs as well...).
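For a concrete example of the failure: two successive azimuth readings of 179 degrees and -179 degrees are only 2 degrees apart on the compass rose, yet a naive average gives (179 + (-179)) / 2 = 0 degrees, a heading pointing in exactly the opposite direction.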
You say that you tested this with a buffer size of 1. Theoretically, the problem should not be present if there is no averaging at all, although in some implementations of a circular buffer I've seen in the past, it could mean that there is still averaging done with at least 1 past value, not that there is no averaging at all. If this is your case, we have found the root cause of your bug.
Unfortunately, there isn't much of an elegant solution that could be implemented while sticking with your standard averaging filter. What I usually do in this case is switch to another type of low pass filter, which doesn't need any deep buffer to operate: a simple IIR filter (order 1):
diff = x[n] - y[n-1]
y[n] - y[n-1] = alpha * (x[n] - y[n-1]) = alpha * diff
...where y is the filtered angle, x is the raw angle, and alpha<1 is analogous to a time constant, as alpha=1 corresponds to the no-filter case, and the frequency cutoff of the low-pass filter gets lowered as alpha approaches zero. An acute eye would probably have noticed by now that this corresponds to a simple Proportional Controller.
Such a filter allows the compensation of the wraparound of the angle value because we can add or subtract 360 to diff so as to ensure that abs(diff)<=180, which in turn ensures that the filtered angle value will always increase/decrease in the optimal direction to reach its "setpoint".
An example function call, which is to be scheduled periodically, that calculates a filtered angle value y for a given raw angle value x, could be something like this:
private float restrictAngle(float tmpAngle){
while(tmpAngle>=180) tmpAngle-=360;
while(tmpAngle<-180) tmpAngle+=360;
return tmpAngle;
}
//x is a raw angle value from getOrientation(...)
//y is the current filtered angle value
private float calculateFilteredAngle(float x, float y){
final float alpha = 0.1f;
float diff = x-y;
//here, we ensure that abs(diff)<=180
diff = restrictAngle(diff);
y += alpha*diff;
//ensure that y stays within [-180, 180[ bounds
y = restrictAngle(y);
return y;
}
The function calculateFilteredAngle(float x, float y) can then be called periodically, for example like this (for the azimuth angle from the getOrientation(...) function):
filteredAzimuth = calculateFilteredAngle(azimuth, filteredAzimuth);
Using this method, the filter would not misbehave like the averaging filter as mentioned by the OP.
As I could not load the .apk uploaded by the OP, I decided to implement my own test project in order to see if the corrections work. Here is the entire code (it does not use a .XML for the main layout, so I did not include it). Simply copy it to a test project to see if it works on a specific device (tested functional on a HTC Desire w/ Android v. 2.1):
File 1: Compass3DActivity.java:
package com.epichorns.compass3D;
import android.app.Activity;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Bundle;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;
public class Compass3DActivity extends Activity {
//Textviews for showing angle data
TextView mTextView_azimuth;
TextView mTextView_pitch;
TextView mTextView_roll;
TextView mTextView_filtered_azimuth;
TextView mTextView_filtered_pitch;
TextView mTextView_filtered_roll;
float mAngle0_azimuth=0;
float mAngle1_pitch=0;
float mAngle2_roll=0;
float mAngle0_filtered_azimuth=0;
float mAngle1_filtered_pitch=0;
float mAngle2_filtered_roll=0;
private Compass3DView mCompassView;
private SensorManager sensorManager;
//sensor calculation values
float[] mGravity = null;
float[] mGeomagnetic = null;
float Rmat[] = new float[9];
float Imat[] = new float[9];
float orientation[] = new float[3];
SensorEventListener mAccelerometerListener = new SensorEventListener(){
public void onAccuracyChanged(Sensor sensor, int accuracy) {}
public void onSensorChanged(SensorEvent event) {
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER){
mGravity = event.values.clone();
processSensorData();
}
}
};
SensorEventListener mMagnetometerListener = new SensorEventListener(){
public void onAccuracyChanged(Sensor sensor, int accuracy) {}
public void onSensorChanged(SensorEvent event) {
if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD){
mGeomagnetic = event.values.clone();
processSensorData();
update();
}
}
};
private float restrictAngle(float tmpAngle){
while(tmpAngle>=180) tmpAngle-=360;
while(tmpAngle<-180) tmpAngle+=360;
return tmpAngle;
}
//x is a raw angle value from getOrientation(...)
//y is the current filtered angle value
private float calculateFilteredAngle(float x, float y){
final float alpha = 0.3f;
float diff = x-y;
//here, we ensure that abs(diff)<=180
diff = restrictAngle(diff);
y += alpha*diff;
//ensure that y stays within [-180, 180[ bounds
y = restrictAngle(y);
return y;
}
public void processSensorData(){
if (mGravity != null && mGeomagnetic != null) {
boolean success = SensorManager.getRotationMatrix(Rmat, Imat, mGravity, mGeomagnetic);
if (success) {
SensorManager.getOrientation(Rmat, orientation);
mAngle0_azimuth = (float)Math.toDegrees((double)orientation[0]); // orientation contains: azimuth, pitch and roll
mAngle1_pitch = (float)Math.toDegrees((double)orientation[1]); //pitch
mAngle2_roll = -(float)Math.toDegrees((double)orientation[2]); //roll
mAngle0_filtered_azimuth = calculateFilteredAngle(mAngle0_azimuth, mAngle0_filtered_azimuth);
mAngle1_filtered_pitch = calculateFilteredAngle(mAngle1_pitch, mAngle1_filtered_pitch);
mAngle2_filtered_roll = calculateFilteredAngle(mAngle2_roll, mAngle2_filtered_roll);
}
mGravity=null; //oblige full new refresh
mGeomagnetic=null; //oblige full new refresh
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
LinearLayout ll = new LinearLayout(this);
LinearLayout.LayoutParams llParams = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.FILL_PARENT, LinearLayout.LayoutParams.FILL_PARENT);
ll.setLayoutParams(llParams);
ll.setOrientation(LinearLayout.VERTICAL);
ViewGroup.LayoutParams txtParams = new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
mTextView_azimuth = new TextView(this);
mTextView_azimuth.setLayoutParams(txtParams);
mTextView_pitch = new TextView(this);
mTextView_pitch.setLayoutParams(txtParams);
mTextView_roll = new TextView(this);
mTextView_roll.setLayoutParams(txtParams);
mTextView_filtered_azimuth = new TextView(this);
mTextView_filtered_azimuth.setLayoutParams(txtParams);
mTextView_filtered_pitch = new TextView(this);
mTextView_filtered_pitch.setLayoutParams(txtParams);
mTextView_filtered_roll = new TextView(this);
mTextView_filtered_roll.setLayoutParams(txtParams);
mCompassView = new Compass3DView(this);
ViewGroup.LayoutParams compassParams = new ViewGroup.LayoutParams(200,200);
mCompassView.setLayoutParams(compassParams);
ll.addView(mCompassView);
ll.addView(mTextView_azimuth);
ll.addView(mTextView_pitch);
ll.addView(mTextView_roll);
ll.addView(mTextView_filtered_azimuth);
ll.addView(mTextView_filtered_pitch);
ll.addView(mTextView_filtered_roll);
setContentView(ll);
sensorManager = (SensorManager) this.getSystemService(Context.SENSOR_SERVICE);
sensorManager.registerListener(mAccelerometerListener, sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), SensorManager.SENSOR_DELAY_UI);
sensorManager.registerListener(mMagnetometerListener, sensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD), SensorManager.SENSOR_DELAY_UI);
update();
}
@Override
public void onDestroy(){
super.onDestroy();
sensorManager.unregisterListener(mAccelerometerListener);
sensorManager.unregisterListener(mMagnetometerListener);
}
private void update(){
mCompassView.changeAngles(mAngle1_filtered_pitch, mAngle2_filtered_roll, mAngle0_filtered_azimuth);
mTextView_azimuth.setText("Azimuth: "+String.valueOf(mAngle0_azimuth));
mTextView_pitch.setText("Pitch: "+String.valueOf(mAngle1_pitch));
mTextView_roll.setText("Roll: "+String.valueOf(mAngle2_roll));
mTextView_filtered_azimuth.setText("Azimuth: "+String.valueOf(mAngle0_filtered_azimuth));
mTextView_filtered_pitch.setText("Pitch: "+String.valueOf(mAngle1_filtered_pitch));
mTextView_filtered_roll.setText("Roll: "+String.valueOf(mAngle2_filtered_roll));
}
}
File 2: Compass3DView.java:
package com.epichorns.compass3D;
import android.content.Context;
import android.opengl.GLSurfaceView;
public class Compass3DView extends GLSurfaceView {
private Compass3DRenderer mRenderer;
public Compass3DView(Context context) {
super(context);
mRenderer = new Compass3DRenderer(context);
setRenderer(mRenderer);
}
public void changeAngles(float angle0, float angle1, float angle2){
mRenderer.setAngleX(angle0);
mRenderer.setAngleY(angle1);
mRenderer.setAngleZ(angle2);
}
}
File 3: Compass3DRenderer.java:
package com.epichorns.compass3D;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.opengl.GLSurfaceView;
public class Compass3DRenderer implements GLSurfaceView.Renderer {
Context mContext;
// a raw buffer to hold indices
ShortBuffer _indexBuffer;
// raw buffers to hold the vertices
FloatBuffer _vertexBuffer0;
FloatBuffer _vertexBuffer1;
FloatBuffer _vertexBuffer2;
FloatBuffer _vertexBuffer3;
FloatBuffer _vertexBuffer4;
FloatBuffer _vertexBuffer5;
int _numVertices = 3; //standard triangle vertices = 3
FloatBuffer _textureBuffer0123;
//private FloatBuffer _light0Position;
//private FloatBuffer _light0Ambient;
float _light0Position[] = new float[]{10.0f, 10.0f, 10.0f, 0.0f};
float _light0Ambient[] = new float[]{0.05f, 0.05f, 0.05f, 1.0f};
float _light0Diffuse[] = new float[]{0.5f, 0.5f, 0.5f, 1.0f};
float _light0Specular[] = new float[]{0.7f, 0.7f, 0.7f, 1.0f};
float _matAmbient[] = new float[] { 0.6f, 0.6f, 0.6f, 1.0f };
float _matDiffuse[] = new float[] { 0.6f, 0.6f, 0.6f, 1.0f };
private float _angleX=0f;
private float _angleY=0f;
private float _angleZ=0f;
Compass3DRenderer(Context context){
super();
mContext = context;
}
public void setAngleX(float angle) {
_angleX = angle;
}
public void setAngleY(float angle) {
_angleY = angle;
}
public void setAngleZ(float angle) {
_angleZ = angle;
}
FloatBuffer InitFloatBuffer(float[] src){
ByteBuffer bb = ByteBuffer.allocateDirect(4*src.length);
bb.order(ByteOrder.nativeOrder());
FloatBuffer inBuf = bb.asFloatBuffer();
inBuf.put(src);
return inBuf;
}
ShortBuffer InitShortBuffer(short[] src){
ByteBuffer bb = ByteBuffer.allocateDirect(2*src.length);
bb.order(ByteOrder.nativeOrder());
ShortBuffer inBuf = bb.asShortBuffer();
inBuf.put(src);
return inBuf;
}
//Init data for our rendered pyramid
private void initTriangles() {
//Side faces triangles
float[] coords = {
-0.25f, -0.5f, 0.25f,
0.25f, -0.5f, 0.25f,
0f, 0.5f, 0f
};
float[] coords1 = {
0.25f, -0.5f, 0.25f,
0.25f, -0.5f, -0.25f,
0f, 0.5f, 0f
};
float[] coords2 = {
0.25f, -0.5f, -0.25f,
-0.25f, -0.5f, -0.25f,
0f, 0.5f, 0f
};
float[] coords3 = {
-0.25f, -0.5f, -0.25f,
-0.25f, -0.5f, 0.25f,
0f, 0.5f, 0f
};
//Base triangles
float[] coords4 = {
-0.25f, -0.5f, 0.25f,
0.25f, -0.5f, -0.25f,
0.25f, -0.5f, 0.25f
};
float[] coords5 = {
-0.25f, -0.5f, 0.25f,
-0.25f, -0.5f, -0.25f,
0.25f, -0.5f, -0.25f
};
float[] textures0123 = {
// Mapping coordinates for the vertices (UV mapping CW)
0.0f, 0.0f, // bottom left
1.0f, 0.0f, // bottom right
0.5f, 1.0f, // top ctr
};
_vertexBuffer0 = InitFloatBuffer(coords);
_vertexBuffer0.position(0);
_vertexBuffer1 = InitFloatBuffer(coords1);
_vertexBuffer1.position(0);
_vertexBuffer2 = InitFloatBuffer(coords2);
_vertexBuffer2.position(0);
_vertexBuffer3 = InitFloatBuffer(coords3);
_vertexBuffer3.position(0);
_vertexBuffer4 = InitFloatBuffer(coords4);
_vertexBuffer4.position(0);
_vertexBuffer5 = InitFloatBuffer(coords5);
_vertexBuffer5.position(0);
_textureBuffer0123 = InitFloatBuffer(textures0123);
_textureBuffer0123.position(0);
short[] indices = {0, 1, 2};
_indexBuffer = InitShortBuffer(indices);
_indexBuffer.position(0);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
gl.glEnable(GL10.GL_CULL_FACE); // enable the differentiation of which side may be visible
gl.glShadeModel(GL10.GL_SMOOTH);
gl.glFrontFace(GL10.GL_CCW); // which is the front? the one which is drawn counter clockwise
gl.glCullFace(GL10.GL_BACK); // which one should NOT be drawn
initTriangles();
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
}
public void onDrawFrame(GL10 gl) {
gl.glPushMatrix();
gl.glClearColor(0, 0, 0, 1.0f); //clipping backdrop color
// clear the color buffer to show the ClearColor we called above...
gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
// set rotation
gl.glRotatef(_angleY, 0f, 1f, 0f); //ROLL
gl.glRotatef(_angleX, 1f, 0f, 0f); //ELEVATION
gl.glRotatef(_angleZ, 0f, 0f, 1f); //AZIMUTH
//Draw our pyramid
//4 side faces
gl.glColor4f(0.5f, 0f, 0f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer0);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glColor4f(0.5f, 0.5f, 0f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer1);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glColor4f(0f, 0.5f, 0f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer2);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glColor4f(0f, 0.5f, 0.5f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer3);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
//Base face
gl.glColor4f(0f, 0f, 0.5f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer4);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer5);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glPopMatrix();
}
public void onSurfaceChanged(GL10 gl, int w, int h) {
gl.glViewport(0, 0, w, h);
gl.glViewport(0, 0, w, h);
}
}
Please note that this code does not compensate for tablet default landscape orientation, so it is only expected to work correctly on a phone (I didn't have a tablet close by to test any correction code).
You should probably try a longer delay like SENSOR_DELAY_GAME and/or keep/increase the size of your circular buffer. The sensors (accelerometer, compass, etc.) on mobile devices are inherently noisy, so when I asked about a 'low pass filter', I meant: do you use more data to decrease the frequency of your app's usable updates? Your video was done inside; I would also recommend going to a place with less EM interference, such as a park, just to check that the behavior is consistent, as well as doing the standard compass reset action (rotate the device in a figure-8). In the end you may have to apply some heuristics to throw out the 'bad' data to make a smoother experience for the user.
Well, I had exactly the same problem when I was retrieving the orientation. The thing is that I never got it solved (I had to set a constraint on the device's position when retrieving it), and I don't know if you'll ever be able to.
Pick up a magnetic compass and try to get a north reading while the compass is in the situation you describe: you will get the same nonsensical results. So you can't really expect the device's compass to do any better!
A few words about filtering, with your permission.
I would suggest doing the averaging on the magnetic field vector itself, before turning it into angles (see the sketch below).
It is wrong to do averaging/smoothing only on the angles, without using some sort of magnitude. The angles by themselves do not provide enough data to determine a direction/heading/bearing.
Example: when you want to know the average wind direction over a whole day, you must use the strength of the wind, not just the angles. If you average only the angles you will get an absolutely wrong wind direction.
As for bearing direction, I would use the speed as the magnitude.
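A sketch of that idea against the code above (the window size N and the names are illustrative): keep a small circular buffer of raw TYPE_MAGNETIC_FIELD vectors and hand the component-wise average to SensorManager.getRotationMatrix(...) as mGeomagnetic.
// Averaging the vector components preserves the implicit magnitude information
// that plain angle averaging throws away.
private static final int N = 10;                   // illustrative window size
private final float[][] magSamples = new float[N][3];
private int magIndex = 0, magCount = 0;

private float[] averagedMagnetic(float[] latest) {
    magSamples[magIndex] = latest.clone();         // store the newest sample
    magIndex = (magIndex + 1) % N;
    if (magCount < N) magCount++;
    float[] avg = new float[3];
    for (int i = 0; i < magCount; i++)
        for (int k = 0; k < 3; k++)
            avg[k] += magSamples[i][k];
    for (int k = 0; k < 3; k++)
        avg[k] /= magCount;
    return avg;                                    // feed this to getRotationMatrix(...)
}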
