OpenGL first steps with light and normals - java

I'm playing around with some basic OpenGL stuff and trying to set up a simple square with lighting enabled, but the lighting is not correct, so I guess there is something wrong with my normals.
Or is my understanding of normals totally wrong?
Here's my rendering code (by the way, I'm using LWJGL):
public class Renderer {
DisplayMode displayMode;
int i;
int width;
int height;
private boolean drawAxes = false;
private float rotation = 40.0f;
private float zoom = -20f;
// ----------- Variables added for Lighting Test -----------//
private FloatBuffer matSpecular;
private FloatBuffer lightPosition;
private FloatBuffer whiteLight;
private FloatBuffer lModelAmbient;
public Renderer(int width, int height) {
this.width = width;
this.height = height;
}
public static Renderer start() throws LWJGLException {
Renderer r = new Renderer(800, 600);
r.initContext();
r.run();
return r;
}
private void initContext() throws LWJGLException {
Display.setFullscreen(false);
DisplayMode d[] = Display.getAvailableDisplayModes();
for (int i = 0; i < d.length; i++) {
if (d[i].getWidth() == width && d[i].getHeight() == height && d[i].getBitsPerPixel() == 32) {
displayMode = d[i];
break;
}
}
Display.setDisplayMode(displayMode);
Display.create();
}
private void run() {
initGL();
while (!Display.isCloseRequested()) {
preRender();
render();
Display.update();
Display.sync(60);
}
Display.destroy();
}
private void initGL() {
GL11.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); // Black Background
GL11.glClearDepth(1.0); // Depth Buffer Setup
GL11.glEnable(GL11.GL_DEPTH_TEST); // Enables Depth Testing
GL11.glDepthFunc(GL11.GL_LEQUAL); // The Type Of Depth Testing To Do
GL11.glMatrixMode(GL11.GL_PROJECTION); // Select The Projection Matrix
GL11.glLoadIdentity(); // Reset The Projection Matrix
// Calculate The Aspect Ratio Of The Window
GLU.gluPerspective(45.0f, (float) displayMode.getWidth() / (float) displayMode.getHeight(), 0.1f, 100.0f);
GL11.glMatrixMode(GL11.GL_MODELVIEW); // Select The Modelview Matrix
// Really Nice Perspective Calculations
GL11.glHint(GL11.GL_PERSPECTIVE_CORRECTION_HINT, GL11.GL_NICEST);
GL11.glPolygonMode(GL11.GL_FRONT_AND_BACK, GL11.GL_FILL);
initLightArrays();
glShadeModel(GL_SMOOTH);
glMaterial(GL_FRONT, GL_SPECULAR, matSpecular); // sets specular material color
glMaterialf(GL_FRONT, GL_SHININESS, 100.0f); // sets shininess
glLight(GL_LIGHT0, GL_POSITION, lightPosition); // sets light position
glLight(GL_LIGHT0, GL_SPECULAR, whiteLight); // sets specular light to white
glLight(GL_LIGHT0, GL_DIFFUSE, whiteLight); // sets diffuse light to white
glLightModel(GL_LIGHT_MODEL_AMBIENT, lModelAmbient); // global ambient light
glEnable(GL_LIGHTING); // enables lighting
glEnable(GL_LIGHT0); // enables light0
glEnable(GL_COLOR_MATERIAL); // enables opengl to use glColor3f to define material color
glColorMaterial(GL_FRONT, GL_AMBIENT_AND_DIFFUSE); // tell opengl glColor3f effects the ambient and diffuse properties of material
}
private void preRender() {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
GL11.glTranslatef(0f, 0f, zoom);
GL11.glRotatef(-60f, 1f, 0f, 0f);
GL11.glRotatef(rotation, 0f, 0f, 1f);
}
private void render() {
FloatBuffer cBuffer = BufferUtils.createFloatBuffer(6*3);
float[] cArray = { 1f,1f,1f,
1f,1f,1f,
1f,1f,1f,
1f,1f,1f,
1f,1f,1f,
1f,1f,1f};
cBuffer.put(cArray);
cBuffer.flip();
FloatBuffer vBuffer = BufferUtils.createFloatBuffer(6*3);
float[] vArray = { 1f,1f,0f,
-1f,-1f,0,
1f,-1f,0,
1f,1f,0f,
-1f,1f,0,
-1f,-1f,0};
vBuffer.put(vArray);
vBuffer.flip();
FloatBuffer nBuffer = BufferUtils.createFloatBuffer(6*3);
float[] nArray = { 0f,0f,1f,
0f,0f,1f,
0f,0f,1f,
0f,0f,1f,
0f,0f,1f,
0f,0f,1f};
nBuffer.put(nArray);
nBuffer.flip();
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_COLOR_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
glColorPointer(3, 0, cBuffer);
glVertexPointer(3, 0, vBuffer);
glNormalPointer(3, nBuffer);
glDrawArrays(GL_TRIANGLES, 0, 6);
glDisableClientState(GL_COLOR_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
if (drawAxes) {
drawAxes(6);
}
glTranslatef(0.0f, 0.0f, 3);
glColor3f(0.1f, 0.4f, 0.9f);
}
public static void main(String[] args) throws LWJGLException {
System.setProperty("org.lwjgl.opengl.Display.allowSoftwareOpenGL", "true");
Renderer.start();
}
}

You are setting your normal pointer wrong:
glColorPointer(3, 0, cBuffer);
glVertexPointer(3, 0, vBuffer);
glNormalPointer(3, nBuffer);
The fixed-function GL always expects normals to be 3-dimensional vectors, hence the size parameter (which tells the GL how many values there are in every vector) is not present in glNormalPointer. The 3 you are setting here is the stride parameter, which specifies the byte offset between consecutive array elements. A stride of 3 does not make any sense: the GL will interpret the second normal as beginning 3 bytes into the array, which means it combines the last byte of your first normal's x component together with 3 bytes of your first normal's y component when it reads the second normal's x component, and so on...
Since your array is tightly packed, you can use the shortcut 0 here, like you do with the other pointers.
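For illustration, the corrected client-state calls could look like this (same tightly packed buffers as in the question; only the normal pointer call changes):
glColorPointer(3, 0, cBuffer);  // 3 floats per color, stride 0 = tightly packed
glVertexPointer(3, 0, vBuffer); // 3 floats per vertex, stride 0
glNormalPointer(0, nBuffer);    // normals are always 3 components, so only the stride is passed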
However, you must be aware that all of this has been deprecated in OpenGL for almost a decade; modern core versions of OpenGL do not support the fixed-function pipeline at all. If you are learning OpenGL nowadays, I strongly recommend learning modern, shader-based GL instead.

Without seeing more of your code, it's very difficult to see exactly what's going wrong.
However, I do see one thing that could be a problem:
FloatBuffer vBuffer = BufferUtils.createFloatBuffer(6*3);
float[] vArray = { 1f,1f,0f,
1f,-1f,0,
-1f,-1f,0,
1f,1f,0f,
-1f,1f,0,
-1f,-1f,0};
vBuffer.put(vArray);
vBuffer.flip();
The winding order of your triangles is not the same. The first triangle winds clockwise, whereas the second triangle winds counter-clockwise. You'll need to reorder the vertices to make sure they wind in the same direction. OpenGL usually prefers counter-clockwise winding, so if I were you, I'd flip the first triangle, as in the sketch below.
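A minimal sketch of that reordering, based on the vertex data quoted above (illustrative, not the OP's exact buffer):
float[] vArray = { -1f,-1f,0f,  // first triangle, reordered to wind counter-clockwise
                    1f,-1f,0f,
                    1f, 1f,0f,
                    1f, 1f,0f,  // second triangle, already counter-clockwise
                   -1f, 1f,0f,
                   -1f,-1f,0f };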
If you're still getting the problem after you've done this, then post the rest of your draw code, as what you're showing here doesn't give a lot of information.

Related

OpenGL ES 3.0: call object by reference to add gravity and OnClick event

I am new to OpenGL and I am trying to find a way to give a drawn object, such as a triangle, an ID of some kind. This way I can refer to it by ID to give it motion as well as add touch events.
I am not sure if this is the correct way or if there is a much better way to do this. I have objects made but do not know how to refer to them and give them motion or an onClick event. I have looked around, however a lot of the approaches seem to be outdated and no longer work, or the link is now dead.
I have a Renderer as such:
public class MyGLRenderer implements GLSurfaceView.Renderer {
private Triangle mTriangle;
// Called once to set up the view's opengl es environment
public void onSurfaceCreated(GL10 unused, EGLConfig config){
//Set the background frame color
GLES30.glClearColor(255.0f,255.0f,255.0f,0.0f);
mTriangle = new Triangle();
}
// Called for each redraw of the view
public void onDrawFrame(GL10 gl){
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
//Redraw background color
GLES30.glClear(GLES30.GL_COLOR_BUFFER_BIT);
mTriangle.draw();
}
// Called if the geometry of the view changes (for example when the screen orientation changes from landscape to portrait)
public void onSurfaceChanged(GL10 unused, int width, int height){
// Called if the geometry of the viewport changes
GLES30.glViewport(0, 0, width, height);
}
public static int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES30.GL_VERTEX_SHADER)
// or a fragment shader type (GLES30.GL_FRAGMENT_SHADER)
int shader = GLES30.glCreateShader(type);
// add the source code to the shader and compile it
GLES30.glShaderSource(shader, shaderCode);
GLES30.glCompileShader(shader);
return shader;
}
}
Surface View as Such:
public class MyGLSurfaceView extends GLSurfaceView {
private final MyGLRenderer mRenderer;
public MyGLSurfaceView(Context context, AttributeSet attrs){
super(context, attrs);
//Create an OpenGl 3.0 context
setEGLContextClientVersion(3);
mRenderer = new MyGLRenderer();
//Set the Renderer for drawing on the GLSurfaceView
setRenderer(mRenderer);
//Render the view only when there is a change in the drawing data
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
}
And a Triangle Class as such:
public class Triangle {
private FloatBuffer vertexBuffer;
private final String vertexShaderCode =
"attribute vec4 vPosition;" +
"void main() {" +
" gl_Position = vPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
private final int mProgram;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float triangleCoords[] = { // in counterclockwise order:
0.0f, 0.622008459f, 0.0f, // top
-0.5f, -0.311004243f, 0.0f, // bottom left
0.5f, -0.311004243f, 0.0f // bottom right
};
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.63671875f, 0.76953125f, 0.22265625f, 1.0f };
public Triangle() {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (number of coordinate values * 4 bytes per float)
triangleCoords.length * 4);
// use the device hardware's native byte order
bb.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
// add the coordinates to the FloatBuffer
vertexBuffer.put(triangleCoords);
// set the buffer to read the first coordinate
vertexBuffer.position(0);
int vertexShader = MyGLRenderer.loadShader(GLES30.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = MyGLRenderer.loadShader(GLES30.GL_FRAGMENT_SHADER,
fragmentShaderCode);
// create empty OpenGL ES Program
mProgram = GLES30.glCreateProgram();
// add the vertex shader to program
GLES30.glAttachShader(mProgram, vertexShader);
// add the fragment shader to program
GLES30.glAttachShader(mProgram, fragmentShader);
// creates OpenGL ES program executables
GLES30.glLinkProgram(mProgram);
}
private int mPositionHandle;
private int mColorHandle;
private final int vertexCount = triangleCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
public void draw() {
// Add program to OpenGL ES environment
GLES30.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES30.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES30.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES30.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES30.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES30.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES30.glUniform4fv(mColorHandle, 1, color, 0);
// Draw the triangle
GLES30.glDrawArrays(GLES30.GL_TRIANGLES, 0, vertexCount);
// Disable vertex array
GLES30.glDisableVertexAttribArray(mPositionHandle);
}
}
I would like something to do the following:
drawnTriangleObject_ID.Add gravity to move down or up
drawnTriangleObject_ID.OnClick( // Do Something when this object is clicked )
In your triangle class, add positioning data
float x = 0.0f;
float y = 0.0f;
float z = 0.0f;
and when drawing the triangle, you have to apply the translation
Matrix.setIdentityM(modelmatrix, 0);
Matrix.translateM(modelmatrix, 0, x, y, z);
then multiply the model matrix by the view matrix
Matrix.multiplyMM(resultmodelview, 0, viewmatrix, 0, modelmatrix, 0);
then multiply the result with projection matrix
Matrix.multiplyMM(resultresultprojection, 0, ProjectionMatrix, 0, resultmodelview, 0);
and upload it to the shader:
World.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, resultresultprojection, 0);
This all assumes you already have the frustum, projection and view matrices built (which, given that you can already see the triangle, may well be the case).
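Put together, a minimal sketch of those steps inside the Triangle class could look like the following. It assumes the vertex shader is extended with a "uniform mat4 uMVPMatrix" that multiplies vPosition, that the view and projection matrices are built elsewhere and passed in, and that android.opengl.Matrix is imported; all names here are illustrative, not part of the original code:
float x = 0.0f, y = 0.0f, z = 0.0f;                     // per-object position
private final float[] modelMatrix = new float[16];
private final float[] modelView = new float[16];
private final float[] mvpMatrix = new float[16];
public void draw(float[] viewMatrix, float[] projectionMatrix) {
    GLES30.glUseProgram(mProgram);
    Matrix.setIdentityM(modelMatrix, 0);
    Matrix.translateM(modelMatrix, 0, x, y, z);                          // apply the object's translation
    Matrix.multiplyMM(modelView, 0, viewMatrix, 0, modelMatrix, 0);      // view * model
    Matrix.multiplyMM(mvpMatrix, 0, projectionMatrix, 0, modelView, 0);  // projection * view * model
    int mvpHandle = GLES30.glGetUniformLocation(mProgram, "uMVPMatrix");
    GLES30.glUniformMatrix4fv(mvpHandle, 1, false, mvpMatrix, 0);        // upload the combined matrix
    // ... existing attribute setup and glDrawArrays(...) follow unchanged ...
}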
Good luck and have fun coding !
As for the "OnClick" it is a bit more tricky:
- Once the screen has been clicked, cast a ray from your XY plance (screen) to the 3D world. the ray (line) will have 2 coordinates, a start point and end point in form of X,Y,Z, or might have a start point and a vector(direction of the line)... On each frame you have to check if this ray is created, if it is, you need to use some Math to check if that line intersects your triangle (Don't forget to apply the rotation and translation of the triangle to it's vertices before checking intersection with the Ray). when the frame has been drawn don't forget to delete the ray
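A minimal self-contained sketch of such a test (Möller-Trumbore), assuming the ray has already been unprojected into the same space as the transformed triangle vertices; all arrays are {x, y, z} and the helper names are illustrative:
static boolean rayIntersectsTriangle(float[] origin, float[] dir,
                                     float[] v0, float[] v1, float[] v2) {
    final float EPS = 1e-6f;
    float[] e1 = sub(v1, v0), e2 = sub(v2, v0);   // triangle edges
    float[] p = cross(dir, e2);
    float det = dot(e1, p);
    if (Math.abs(det) < EPS) return false;        // ray is parallel to the triangle
    float inv = 1.0f / det;
    float[] t = sub(origin, v0);
    float u = dot(t, p) * inv;
    if (u < 0f || u > 1f) return false;           // outside first barycentric bound
    float[] q = cross(t, e1);
    float v = dot(dir, q) * inv;
    if (v < 0f || u + v > 1f) return false;       // outside second barycentric bound
    return dot(e2, q) * inv >= 0f;                // hit must lie in front of the ray origin
}
static float[] sub(float[] a, float[] b) { return new float[]{a[0]-b[0], a[1]-b[1], a[2]-b[2]}; }
static float[] cross(float[] a, float[] b) { return new float[]{a[1]*b[2]-a[2]*b[1], a[2]*b[0]-a[0]*b[2], a[0]*b[1]-a[1]*b[0]}; }
static float dot(float[] a, float[] b) { return a[0]*b[0] + a[1]*b[1] + a[2]*b[2]; }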

Applying a map-of-the-earth texture to a sphere

I've been trying to implement a 3D animation of a solar system in OpenGL (using JOGL). So far I have 5 planets of different sizes, but the problem I seem to be having is that I can't apply a map-of-the-earth texture to a sphere. Can anybody help me with how it's done?
This is the code I have so far in my display method:
@Override
public void display(GLAutoDrawable drawable) {
GL2 gl = drawable.getGL().getGL2();
GLU glu = new GLU();
gl.glClear(GL.GL_COLOR_BUFFER_BIT);
//make sure we are in model_view mode
gl.glMatrixMode(GL2.GL_MODELVIEW);
gl.glLoadIdentity();
glu.gluLookAt(10,20,20,0,3,0,0, 20, 0);
//gl.glMatrixMode(GL2.GL_PROJECTION);
//glu.gluPerspective(45,1,1,25);
//render ground plane
gl.glPushMatrix();
gl.glTranslatef(-10.75f, 3.0f, -1.0f);
gl.glColor3f(0.3f, 0.5f, 1f);
GLUquadric earth = glu.gluNewQuadric();
glu.gluQuadricDrawStyle(earth, GLU.GLU_FILL);
glu.gluQuadricNormals(earth, GLU.GLU_FLAT);
glu.gluQuadricOrientation(earth, GLU.GLU_OUTSIDE);
final float radius = 3.378f;
final int slices = 89;
final int stacks = 16;
glu.gluSphere(earth, radius, slices, stacks);
glu.gluDeleteQuadric(earth);
Texture earths;
try {
earths = TextureIO.newTexture(new File("earth.png"), true);
}
catch (IOException e) {
javax.swing.JOptionPane.showMessageDialog(null, e);
}
gl.glPopMatrix();
//gl.glEnd();
gl.glPushMatrix();
gl.glTranslatef(2.75f, 3.0f, -0.0f);
gl.glColor3f(0.3f, 0.5f, 1f);
GLUquadric earth1 = glu.gluNewQuadric();
glu.gluQuadricDrawStyle(earth1, GLU.GLU_FILL);
glu.gluQuadricNormals(earth1, GLU.GLU_FLAT);
glu.gluQuadricOrientation(earth1, GLU.GLU_OUTSIDE);
final float radius1 = 3.378f;
final int slices1 = 90;
final int stacks1 = 63;
glu.gluSphere(earth1, radius1, slices1, stacks1);
glu.gluDeleteQuadric(earth1);
gl.glPopMatrix();
gl.glPushMatrix();
gl.glTranslatef(3.75f, 6.0f, -7.20f);
gl.glColor3f(0.3f, 0.5f, 1f);
GLUquadric earth3 = glu.gluNewQuadric();
glu.gluQuadricDrawStyle(earth3, GLU.GLU_FILL);
glu.gluQuadricNormals(earth3, GLU.GLU_FLAT);
glu.gluQuadricOrientation(earth1, GLU.GLU_OUTSIDE);
final float radius3 = 1.878f;
final int slices3 = 89;
final int stacks3 = 16;
glu.gluSphere(earth3, radius3, slices3, stacks3);
glu.gluDeleteQuadric(earth3);
gl.glPopMatrix();
gl.glPushMatrix();
gl.glTranslatef(12.75f, 2.0f, -7.20f);
gl.glColor3f(0.3f, 0.5f, 1f);
GLUquadric earth4 = glu.gluNewQuadric();
glu.gluQuadricDrawStyle(earth4, GLU.GLU_FILL);
glu.gluQuadricNormals(earth4, GLU.GLU_FLAT);
glu.gluQuadricOrientation(earth4, GLU.GLU_OUTSIDE);
final float radius4 = 1.078f;
final int slices4 = 89;
final int stacks4 = 16;
glu.gluSphere(earth4, radius4, slices4, stacks4);
glu.gluDeleteQuadric(earth4);
gl.glPopMatrix();
gl.glPushMatrix();
gl.glTranslatef(2.75f, -6.0f, -0.0f);
gl.glColor3f(0.3f, 0.5f, 1f);
GLUquadric earth5 = glu.gluNewQuadric();
glu.gluQuadricDrawStyle(earth5, GLU.GLU_FILL);
glu.gluQuadricNormals(earth5, GLU.GLU_FLAT);
glu.gluQuadricOrientation(earth5, GLU.GLU_OUTSIDE);
final float radius5 = 3.778f;
final int slices5 = 90;
final int stacks5 = 63;
glu.gluSphere(earth5, radius5, slices5, stacks5);
glu.gluDeleteQuadric(earth5);
gl.glPopMatrix();
}
create your own sphere mesh
A simple 2D loop through 2 angles (spherical coordinates to Cartesian). You can easily add ellipsoid properties (Earth is not a sphere) if you want more precision. If not, then you can use a single sphere mesh for all planets and just scale it before use ...
Let a be the longitude and b the latitude, so loop a from 0 to 2*PI [rad] and b from -0.5*PI to +0.5*PI [rad], where PI=3.1415... (in C++ math.h it is called M_PI). If your math API uses degrees then convert: PI [rad] = 180.0 [deg].
add necessary info per vertex
normals for lighting
// just unit sphere
nx=cos(b)*cos(a);
ny=cos(b)*sin(a);
nz=sin(b);
texture coordinates (assuming a rectangular, non-distorted image)
// just convert a,b to <0,1> range
tx=a/(2.0*PI);
ty=(b/PI)+0.5;
vertex position
// just sphere(rx=ry=rz=r) or ellipsoid (rx=ry=equatorial and rz=polar radius)
// can also use rx*nx,ry*ny,rz*nz instead ...
x=rx*cos(b)*cos(a);
y=ry*cos(b)*sin(a);
z=rz*sin(b);
send all of this to OpenGL
Store all of the above in some memory space (CPU or GPU) and then send it to rendering. You can use legacy glBegin(GL_QUAD_STRIP); ... glEnd(); or a display list/VBO/VAO. Bind the right texture before each planet/body, and do not forget to update the ModelView matrix too. This is how my coordinate systems look.
Also have a look at these related Q/As:
realistic n-body solar system
sphere mesh by subdivision
[edit1] C++ example
//---------------------------------------------------------------------------
const int nb=15; // slices
const int na=nb<<1; // points per equator
class planet
{
public:
bool _init; // has been initiated ?
GLfloat x0,y0,z0; // center of planet [GCS]
GLfloat pos[na][nb][3]; // vertex
GLfloat nor[na][nb][3]; // normal
GLfloat txr[na][nb][2]; // texcoord
GLuint txrid; // texture id
GLfloat t; // daily rotation angle [deg]
planet() { _init=false; txrid=0; x0=0.0; y0=0.0; z0=0.0; t=0.0; }
~planet() { if (_init) glDeleteTextures(1,&txrid); }
void init(GLfloat r,AnsiString texture); // call after OpenGL is already working !!!
void draw();
};
void planet::init(GLfloat r,AnsiString texture)
{
if (!_init) { _init=true; glGenTextures(1,&txrid); }
GLfloat x,y,z,a,b,da,db;
GLfloat tx0,tdx,ty0,tdy;// just correction if CLAMP_TO_EDGE is not available
int ia,ib;
// a,b to texture coordinate system
tx0=0.0;
ty0=0.5;
tdx=0.5/M_PI;
tdy=1.0/M_PI;
// load texture to GPU memory
if (texture!="")
{
Byte q;
unsigned int *pp;
int xs,ys,x,y,adr,*txr;
union { unsigned int c32; Byte db[4]; } c;
Graphics::TBitmap *bmp=new Graphics::TBitmap; // new bmp
bmp->LoadFromFile(texture); // load from file
bmp->HandleType=bmDIB; // allow direct access to pixels
bmp->PixelFormat=pf32bit; // set pixel to 32bit so int is the same size as pixel
xs=bmp->Width; // resolution should be power of 2
ys=bmp->Height;
txr=new int[xs*ys];
for(adr=0,y=0;y<ys;y++)
{
pp=(unsigned int*)bmp->ScanLine[y];
for(x=0;x<xs;x++,adr++)
{
// rgb2bgr and copy bmp -> txr[]
c.c32=pp[x];
q =c.db[2];
c.db[2]=c.db[0];
c.db[0]=q;
txr[adr]=c.c32;
}
}
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D,txrid);
glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S,GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T,GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE,GL_MODULATE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, xs, ys, 0, GL_RGBA, GL_UNSIGNED_BYTE, txr);
glDisable(GL_TEXTURE_2D);
delete bmp;
delete[] txr;
// texture coordinates by 1 pixel from each edge (GL_CLAMP_TO_EDGE)
tx0+=1.0/GLfloat(xs);
ty0+=1.0/GLfloat(ys);
tdx*=GLfloat(xs-2)/GLfloat(xs);
tdy*=GLfloat(ys-2)/GLfloat(ys);
}
// correct texture coordinate system (invert x)
tx0=1.0-tx0; tdx=-tdx;
da=(2.0*M_PI)/GLfloat(na-1);
db= M_PI /GLfloat(nb-1);
for (ib=0,b=-0.5*M_PI;ib<nb;ib++,b+=db)
for (ia=0,a= 0.0 ;ia<na;ia++,a+=da)
{
x=cos(b)*cos(a);
y=cos(b)*sin(a);
z=sin(b);
nor[ia][ib][0]=x;
nor[ia][ib][1]=y;
nor[ia][ib][2]=z;
pos[ia][ib][0]=r*x;
pos[ia][ib][1]=r*y;
pos[ia][ib][2]=r*z;
txr[ia][ib][0]=tx0+(a*tdx);
txr[ia][ib][1]=ty0+(b*tdy);
}
}
void planet::draw()
{
if (!_init) return;
int ia,ib0,ib1;
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glLoadIdentity();
glTranslatef(x0,y0,z0);
glRotatef(90.0,1.0,0.0,0.0); // rotate planet's z axis (North) to OpenGL y axis (Up)
glRotatef(-t,0.0,0.0,1.0); // daily rotation around the planet's axis
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D,txrid);
glColor3f(1.0,1.0,1.0);
for (ib0=0,ib1=1;ib1<nb;ib0=ib1,ib1++)
{
glBegin(GL_QUAD_STRIP);
for (ia=0;ia<na;ia++)
{
glNormal3fv (nor[ia][ib0]);
glTexCoord2fv(txr[ia][ib0]);
glVertex3fv (pos[ia][ib0]);
glNormal3fv (nor[ia][ib1]);
glTexCoord2fv(txr[ia][ib1]);
glVertex3fv (pos[ia][ib1]);
}
glEnd();
}
glDisable(GL_TEXTURE_2D);
glMatrixMode(GL_MODELVIEW);
glPopMatrix();
}
//---------------------------------------------------------------------------
usage:
// variable to store planet (global)
planet earth;
// init after OpenGL initialisation
earth.init(1.0,"earth.bmp");
// position update
earth.x0= 0.0;
earth.y0= 0.0;
earth.z0=-20.0;
// add this to render loop
earth.draw(); // draws the planet
earth.t+=2.5; // just rotate planet by 2.5 deg each frame...
I know it's ugly, but it does not use any fancy stuff, just legacy OpenGL and math.h (cos(), sin(), M_PI) and VCL for bitmap loading. So rewrite it for your environment and you will be fine. Do not forget that each planet has its own texture, so you need one txrid per planet; either keep each planet in a separate planet variable or rewrite ...

unable to render with array buffers in Java with LWJGL

I'm attempting to learn how to program OpenGL the modern way, using vertex array/vertex buffer objects. I'm using the tutorials on the LWJGL wiki right now, and even when I copy & paste the tutorial code, I get a window with the background colour set properly but no shape rendered on top of it. The tutorial page shows a screenshot with a white rectangle rendered over the background. Is this a common issue, or is there any way I can get further information on my error?
Edit: using shaders and putting some colour on the vertices fixes the problem. I'm not posting this as an answer quite yet, though, because I'm assuming the tutorial code was intended to work without the use of shaders (which come in a later portion of the tutorial).
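For reference, a 3.2 core-profile context has no fixed-function pipeline, so nothing gets a defined color unless a shader program is bound. A minimal pass-through pair might look like this (a sketch; the GLSL names and the helper method are assumptions, not part of the tutorial code):
private static final String VERTEX_SRC =
    "#version 150 core\n" +
    "in vec3 in_Position;\n" +
    "void main() { gl_Position = vec4(in_Position, 1.0); }\n";
private static final String FRAGMENT_SRC =
    "#version 150 core\n" +
    "out vec4 out_Color;\n" +
    "void main() { out_Color = vec4(1.0); }\n"; // plain white
private int createProgram() {
    int vs = GL20.glCreateShader(GL20.GL_VERTEX_SHADER);
    GL20.glShaderSource(vs, VERTEX_SRC);
    GL20.glCompileShader(vs);
    int fs = GL20.glCreateShader(GL20.GL_FRAGMENT_SHADER);
    GL20.glShaderSource(fs, FRAGMENT_SRC);
    GL20.glCompileShader(fs);
    int program = GL20.glCreateProgram();
    GL20.glAttachShader(program, vs);
    GL20.glAttachShader(program, fs);
    GL20.glBindAttribLocation(program, 0, "in_Position"); // matches attribute index 0 used in setupQuad()
    GL20.glLinkProgram(program);
    return program;
}
// then call GL20.glUseProgram(program) once before the glDrawArrays call in loopCycle()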
This is the code on the tutorial page:
import java.nio.FloatBuffer;
import org.lwjgl.BufferUtils;
import org.lwjgl.LWJGLException;
import org.lwjgl.opengl.ContextAttribs;
import org.lwjgl.opengl.Display;
import org.lwjgl.opengl.DisplayMode;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL15;
import org.lwjgl.opengl.GL20;
import org.lwjgl.opengl.GL30;
import org.lwjgl.opengl.PixelFormat;
import org.lwjgl.util.glu.GLU;
public class TheQuadExampleDrawArrays {
// Entry point for the application
public static void main(String[] args) {
new TheQuadExampleDrawArrays();
}
// Setup variables
private final String WINDOW_TITLE = "The Quad: glDrawArrays";
private final int WIDTH = 320;
private final int HEIGHT = 240;
// Quad variables
private int vaoId = 0;
private int vboId = 0;
private int vertexCount = 0;
public TheQuadExampleDrawArrays() {
// Initialize OpenGL (Display)
this.setupOpenGL();
this.setupQuad();
while (!Display.isCloseRequested()) {
// Do a single loop (logic/render)
this.loopCycle();
// Force a maximum FPS of about 60
Display.sync(60);
// Let the CPU synchronize with the GPU if the GPU is lagging behind
Display.update();
}
// Destroy OpenGL (Display)
this.destroyOpenGL();
}
public void setupOpenGL() {
// Setup an OpenGL context with API version 3.2
try {
PixelFormat pixelFormat = new PixelFormat();
ContextAttribs contextAtrributes = new ContextAttribs(3, 2)
.withForwardCompatible(true)
.withProfileCore(true);
Display.setDisplayMode(new DisplayMode(WIDTH, HEIGHT));
Display.setTitle(WINDOW_TITLE);
Display.create(pixelFormat, contextAtrributes);
GL11.glViewport(0, 0, WIDTH, HEIGHT);
} catch (LWJGLException e) {
e.printStackTrace();
System.exit(-1);
}
// Setup an XNA like background color
GL11.glClearColor(0.4f, 0.6f, 0.9f, 0f);
// Map the internal OpenGL coordinate system to the entire screen
GL11.glViewport(0, 0, WIDTH, HEIGHT);
this.exitOnGLError("Error in setupOpenGL");
}
public void setupQuad() {
// OpenGL expects vertices to be defined counter clockwise by default
float[] vertices = {
// Left bottom triangle
-0.5f, 0.5f, 0f,
-0.5f, -0.5f, 0f,
0.5f, -0.5f, 0f,
// Right top triangle
0.5f, -0.5f, 0f,
0.5f, 0.5f, 0f,
-0.5f, 0.5f, 0f
};
// Sending data to OpenGL requires the usage of (flipped) byte buffers
FloatBuffer verticesBuffer = BufferUtils.createFloatBuffer(vertices.length);
verticesBuffer.put(vertices);
verticesBuffer.flip();
vertexCount = 6;
// Create a new Vertex Array Object in memory and select it (bind)
// A VAO can have up to 16 attributes (VBO's) assigned to it by default
vaoId = GL30.glGenVertexArrays();
GL30.glBindVertexArray(vaoId);
// Create a new Vertex Buffer Object in memory and select it (bind)
// A VBO is a collection of Vectors which in this case resemble the location of each vertex.
vboId = GL15.glGenBuffers();
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vboId);
GL15.glBufferData(GL15.GL_ARRAY_BUFFER, verticesBuffer, GL15.GL_STATIC_DRAW);
// Put the VBO in the attributes list at index 0
GL20.glVertexAttribPointer(0, 3, GL11.GL_FLOAT, false, 0, 0);
// Deselect (bind to 0) the VBO
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
// Deselect (bind to 0) the VAO
GL30.glBindVertexArray(0);
this.exitOnGLError("Error in setupQuad");
}
public void loopCycle() {
GL11.glClear(GL11.GL_COLOR_BUFFER_BIT);
// Bind to the VAO that has all the information about the quad vertices
GL30.glBindVertexArray(vaoId);
GL20.glEnableVertexAttribArray(0);
// Draw the vertices
GL11.glDrawArrays(GL11.GL_TRIANGLES, 0, vertexCount);
// Put everything back to default (deselect)
GL20.glDisableVertexAttribArray(0);
GL30.glBindVertexArray(0);
this.exitOnGLError("Error in loopCycle");
}
public void destroyOpenGL() {
// Disable the VBO index from the VAO attributes list
GL20.glDisableVertexAttribArray(0);
// Delete the VBO
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
GL15.glDeleteBuffers(vboId);
// Delete the VAO
GL30.glBindVertexArray(0);
GL30.glDeleteVertexArrays(vaoId);
Display.destroy();
}
public void exitOnGLError(String errorMessage) {
int errorValue = GL11.glGetError();
if (errorValue != GL11.GL_NO_ERROR) {
String errorString = GLU.gluErrorString(errorValue);
System.err.println("ERROR - " + errorMessage + ": " + errorString);
if (Display.isCreated()) Display.destroy();
System.exit(-1);
}
}
}
This is a late answer, but since the Z coordinate is 0 on all of your vertices, shouldn't the quad sit exactly on the near clipping plane and therefore not render because it is out of range? Try moving the vertices back along the Z axis and it should, in theory, render.

Android getOrientation() method returns bad results

I'm creating a 3D compass application.
I'm using the getOrientation method to get the orientation (almost the same implementation as here). If I place the phone on a table it works well, but when the top of the phone points to the sky (minus Z axis in the picture; the sphere is the Earth), getOrientation starts giving really bad results. It jumps through values between 0 and 180 degrees on the Z axis within just a few real degrees of movement. Is there any way to suppress this behaviour? I created a little video that shows the problem (sorry for the bad quality). Thanks in advance.
Solution:
When rotating the model, there is a difference between:
gl.glRotatef(_angleY, 0f, 1f, 0f); //ROLL
gl.glRotatef(_angleX, 1f, 0f, 0f); //ELEVATION
gl.glRotatef(_angleZ, 0f, 0f, 1f); //AZIMUTH
gl.glRotatef(_angleX, 1f, 0f, 0f); //ELEVATION
gl.glRotatef(_angleY, 0f, 1f, 0f); //ROLL
gl.glRotatef(_angleZ, 0f, 0f, 1f); //AZIMUTH
Well, I can see at least 1 problem with this approach of yours.
I assume that you combine a 3D vector corresponding to your magnetometer with an averaging low-pass filter to smooth the data. Although such an approach works great for a sensor value that varies without discontinuities, such as raw data from the accelerometer, it doesn't work so well verbatim with the angular variables fetched from your magnetometer. Why, one might ask?
Because those angular variables (azimuth, pitch, roll) have an upper-bound and a lower-bound, which means that any value above 180 degrees, say 181 degrees, would wrap around to 181-360 = -179 degrees, and any variable below -180 degrees would wrap around in the other direction. So when one of those angular variables get close to those thresholds (180 or -180), this variable will tend to oscillate to values close to those 2 extremes. When you blindly apply a low-pass filter to those values, you get either a smooth decreasing from 180 degrees towards -180 degrees, or a smooth increasing from -180 towards 180 degrees. Either way, the result would look quite like your video above... As long as one directly applies an averaging buffer onto the raw angle data from getOrientation(...), this problem will be present (and should be present not only for the case where the phone is upright, but also in the cases where there are azimuth angle wraparounds too... Maybe you could test for those bugs as well...).
You say that you tested this with a buffer size of 1. Theoretically, the problem should not be present if there is no averaging at all, although in some implementations of a circular buffer I've seen in the past, it could mean that there is still averaging done with at least 1 past value, not that there is no averaging at all. If this is your case, we have found the root cause of your bug.
Unfortunately, there isn't much of an elegant solution that could be implemented while sticking with your standard averaging filter. What I usually do in this case is switch to another type of low pass filter, which doesn't need any deep buffer to operate: a simple IIR filter (order 1):
diff = x[n] - y[n-1]
y[n] - y[n-1] = alpha * (x[n] - y[n-1]) = alpha * diff
...where y is the filtered angle, x is the raw angle, and alpha<1 is analogous to a time constant, as alpha=1 corresponds to the no-filter case, and the frequency cutoff of the low-pass filter gets lowered as alpha approaches zero. An acute eye would probably have noticed by now that this corresponds to a simple Proportional Controller.
Such a filter allows the compensation of the wraparound of the angle value because we can add or subtract 360 to diff so as to ensure that abs(diff)<=180, which in turn ensures that the filtered angle value will always increase/decrease in the optimal direction to reach its "setpoint".
An example function call, which is to be scheduled periodically, that calculates a filtered angle value y for a given raw angle value x, could be something like this:
private float restrictAngle(float tmpAngle){
while(tmpAngle>=180) tmpAngle-=360;
while(tmpAngle<-180) tmpAngle+=360;
return tmpAngle;
}
//x is a raw angle value from getOrientation(...)
//y is the current filtered angle value
private float calculateFilteredAngle(float x, float y){
final float alpha = 0.1f;
float diff = x-y;
//here, we ensure that abs(diff)<=180
diff = restrictAngle(diff);
y += alpha*diff;
//ensure that y stays within [-180, 180[ bounds
y = restrictAngle(y);
return y;
}
The function calculateFilteredAngle(float x, float y) can then be called periodically using something like this (example for azimuth angle from getOrientation(...) function:
filteredAzimuth = calculateFilteredAngle(azimuth, filteredAzimuth);
Using this method, the filter would not misbehave like the averaging filter as mentioned by the OP.
As I could not load the .apk uploaded by the OP, I decided to implement my own test project in order to see if the corrections work. Here is the entire code (it does not use a .XML for the main layout, so I did not include it). Simply copy it to a test project to see if it works on a specific device (tested functional on a HTC Desire w/ Android v. 2.1):
File 1: Compass3DActivity.java:
package com.epichorns.compass3D;
import android.app.Activity;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Bundle;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;
public class Compass3DActivity extends Activity {
//Textviews for showing angle data
TextView mTextView_azimuth;
TextView mTextView_pitch;
TextView mTextView_roll;
TextView mTextView_filtered_azimuth;
TextView mTextView_filtered_pitch;
TextView mTextView_filtered_roll;
float mAngle0_azimuth=0;
float mAngle1_pitch=0;
float mAngle2_roll=0;
float mAngle0_filtered_azimuth=0;
float mAngle1_filtered_pitch=0;
float mAngle2_filtered_roll=0;
private Compass3DView mCompassView;
private SensorManager sensorManager;
//sensor calculation values
float[] mGravity = null;
float[] mGeomagnetic = null;
float Rmat[] = new float[9];
float Imat[] = new float[9];
float orientation[] = new float[3];
SensorEventListener mAccelerometerListener = new SensorEventListener(){
public void onAccuracyChanged(Sensor sensor, int accuracy) {}
public void onSensorChanged(SensorEvent event) {
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER){
mGravity = event.values.clone();
processSensorData();
}
}
};
SensorEventListener mMagnetometerListener = new SensorEventListener(){
public void onAccuracyChanged(Sensor sensor, int accuracy) {}
public void onSensorChanged(SensorEvent event) {
if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD){
mGeomagnetic = event.values.clone();
processSensorData();
update();
}
}
};
private float restrictAngle(float tmpAngle){
while(tmpAngle>=180) tmpAngle-=360;
while(tmpAngle<-180) tmpAngle+=360;
return tmpAngle;
}
//x is a raw angle value from getOrientation(...)
//y is the current filtered angle value
private float calculateFilteredAngle(float x, float y){
final float alpha = 0.3f;
float diff = x-y;
//here, we ensure that abs(diff)<=180
diff = restrictAngle(diff);
y += alpha*diff;
//ensure that y stays within [-180, 180[ bounds
y = restrictAngle(y);
return y;
}
public void processSensorData(){
if (mGravity != null && mGeomagnetic != null) {
boolean success = SensorManager.getRotationMatrix(Rmat, Imat, mGravity, mGeomagnetic);
if (success) {
SensorManager.getOrientation(Rmat, orientation);
mAngle0_azimuth = (float)Math.toDegrees((double)orientation[0]); // orientation contains: azimuth, pitch and roll
mAngle1_pitch = (float)Math.toDegrees((double)orientation[1]); //pitch
mAngle2_roll = -(float)Math.toDegrees((double)orientation[2]); //roll
mAngle0_filtered_azimuth = calculateFilteredAngle(mAngle0_azimuth, mAngle0_filtered_azimuth);
mAngle1_filtered_pitch = calculateFilteredAngle(mAngle1_pitch, mAngle1_filtered_pitch);
mAngle2_filtered_roll = calculateFilteredAngle(mAngle2_roll, mAngle2_filtered_roll);
}
mGravity=null; //oblige full new refresh
mGeomagnetic=null; //oblige full new refresh
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
LinearLayout ll = new LinearLayout(this);
LinearLayout.LayoutParams llParams = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.FILL_PARENT, LinearLayout.LayoutParams.FILL_PARENT);
ll.setLayoutParams(llParams);
ll.setOrientation(LinearLayout.VERTICAL);
ViewGroup.LayoutParams txtParams = new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
mTextView_azimuth = new TextView(this);
mTextView_azimuth.setLayoutParams(txtParams);
mTextView_pitch = new TextView(this);
mTextView_pitch.setLayoutParams(txtParams);
mTextView_roll = new TextView(this);
mTextView_roll.setLayoutParams(txtParams);
mTextView_filtered_azimuth = new TextView(this);
mTextView_filtered_azimuth.setLayoutParams(txtParams);
mTextView_filtered_pitch = new TextView(this);
mTextView_filtered_pitch.setLayoutParams(txtParams);
mTextView_filtered_roll = new TextView(this);
mTextView_filtered_roll.setLayoutParams(txtParams);
mCompassView = new Compass3DView(this);
ViewGroup.LayoutParams compassParams = new ViewGroup.LayoutParams(200,200);
mCompassView.setLayoutParams(compassParams);
ll.addView(mCompassView);
ll.addView(mTextView_azimuth);
ll.addView(mTextView_pitch);
ll.addView(mTextView_roll);
ll.addView(mTextView_filtered_azimuth);
ll.addView(mTextView_filtered_pitch);
ll.addView(mTextView_filtered_roll);
setContentView(ll);
sensorManager = (SensorManager) this.getSystemService(Context.SENSOR_SERVICE);
sensorManager.registerListener(mAccelerometerListener, sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), SensorManager.SENSOR_DELAY_UI);
sensorManager.registerListener(mMagnetometerListener, sensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD), SensorManager.SENSOR_DELAY_UI);
update();
}
@Override
public void onDestroy(){
super.onDestroy();
sensorManager.unregisterListener(mAccelerometerListener);
sensorManager.unregisterListener(mMagnetometerListener);
}
private void update(){
mCompassView.changeAngles(mAngle1_filtered_pitch, mAngle2_filtered_roll, mAngle0_filtered_azimuth);
mTextView_azimuth.setText("Azimuth: "+String.valueOf(mAngle0_azimuth));
mTextView_pitch.setText("Pitch: "+String.valueOf(mAngle1_pitch));
mTextView_roll.setText("Roll: "+String.valueOf(mAngle2_roll));
mTextView_filtered_azimuth.setText("Azimuth: "+String.valueOf(mAngle0_filtered_azimuth));
mTextView_filtered_pitch.setText("Pitch: "+String.valueOf(mAngle1_filtered_pitch));
mTextView_filtered_roll.setText("Roll: "+String.valueOf(mAngle2_filtered_roll));
}
}
File 2: Compass3DView.java:
package com.epichorns.compass3D;
import android.content.Context;
import android.opengl.GLSurfaceView;
public class Compass3DView extends GLSurfaceView {
private Compass3DRenderer mRenderer;
public Compass3DView(Context context) {
super(context);
mRenderer = new Compass3DRenderer(context);
setRenderer(mRenderer);
}
public void changeAngles(float angle0, float angle1, float angle2){
mRenderer.setAngleX(angle0);
mRenderer.setAngleY(angle1);
mRenderer.setAngleZ(angle2);
}
}
File 3: Compass3DRenderer.java:
package com.epichorns.compass3D;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.opengl.GLSurfaceView;
public class Compass3DRenderer implements GLSurfaceView.Renderer {
Context mContext;
// a raw buffer to hold indices
ShortBuffer _indexBuffer;
// raw buffers to hold the vertices
FloatBuffer _vertexBuffer0;
FloatBuffer _vertexBuffer1;
FloatBuffer _vertexBuffer2;
FloatBuffer _vertexBuffer3;
FloatBuffer _vertexBuffer4;
FloatBuffer _vertexBuffer5;
int _numVertices = 3; //standard triangle vertices = 3
FloatBuffer _textureBuffer0123;
//private FloatBuffer _light0Position;
//private FloatBuffer _light0Ambient;
float _light0Position[] = new float[]{10.0f, 10.0f, 10.0f, 0.0f};
float _light0Ambient[] = new float[]{0.05f, 0.05f, 0.05f, 1.0f};
float _light0Diffuse[] = new float[]{0.5f, 0.5f, 0.5f, 1.0f};
float _light0Specular[] = new float[]{0.7f, 0.7f, 0.7f, 1.0f};
float _matAmbient[] = new float[] { 0.6f, 0.6f, 0.6f, 1.0f };
float _matDiffuse[] = new float[] { 0.6f, 0.6f, 0.6f, 1.0f };
private float _angleX=0f;
private float _angleY=0f;
private float _angleZ=0f;
Compass3DRenderer(Context context){
super();
mContext = context;
}
public void setAngleX(float angle) {
_angleX = angle;
}
public void setAngleY(float angle) {
_angleY = angle;
}
public void setAngleZ(float angle) {
_angleZ = angle;
}
FloatBuffer InitFloatBuffer(float[] src){
ByteBuffer bb = ByteBuffer.allocateDirect(4*src.length);
bb.order(ByteOrder.nativeOrder());
FloatBuffer inBuf = bb.asFloatBuffer();
inBuf.put(src);
return inBuf;
}
ShortBuffer InitShortBuffer(short[] src){
ByteBuffer bb = ByteBuffer.allocateDirect(2*src.length);
bb.order(ByteOrder.nativeOrder());
ShortBuffer inBuf = bb.asShortBuffer();
inBuf.put(src);
return inBuf;
}
//Init data for our rendered pyramid
private void initTriangles() {
//Side faces triangles
float[] coords = {
-0.25f, -0.5f, 0.25f,
0.25f, -0.5f, 0.25f,
0f, 0.5f, 0f
};
float[] coords1 = {
0.25f, -0.5f, 0.25f,
0.25f, -0.5f, -0.25f,
0f, 0.5f, 0f
};
float[] coords2 = {
0.25f, -0.5f, -0.25f,
-0.25f, -0.5f, -0.25f,
0f, 0.5f, 0f
};
float[] coords3 = {
-0.25f, -0.5f, -0.25f,
-0.25f, -0.5f, 0.25f,
0f, 0.5f, 0f
};
//Base triangles
float[] coords4 = {
-0.25f, -0.5f, 0.25f,
0.25f, -0.5f, -0.25f,
0.25f, -0.5f, 0.25f
};
float[] coords5 = {
-0.25f, -0.5f, 0.25f,
-0.25f, -0.5f, -0.25f,
0.25f, -0.5f, -0.25f
};
float[] textures0123 = {
// Mapping coordinates for the vertices (UV mapping CW)
0.0f, 0.0f, // bottom left
1.0f, 0.0f, // bottom right
0.5f, 1.0f, // top ctr
};
_vertexBuffer0 = InitFloatBuffer(coords);
_vertexBuffer0.position(0);
_vertexBuffer1 = InitFloatBuffer(coords1);
_vertexBuffer1.position(0);
_vertexBuffer2 = InitFloatBuffer(coords2);
_vertexBuffer2.position(0);
_vertexBuffer3 = InitFloatBuffer(coords3);
_vertexBuffer3.position(0);
_vertexBuffer4 = InitFloatBuffer(coords4);
_vertexBuffer4.position(0);
_vertexBuffer5 = InitFloatBuffer(coords5);
_vertexBuffer5.position(0);
_textureBuffer0123 = InitFloatBuffer(textures0123);
_textureBuffer0123.position(0);
short[] indices = {0, 1, 2};
_indexBuffer = InitShortBuffer(indices);
_indexBuffer.position(0);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
gl.glEnable(GL10.GL_CULL_FACE); // enable the differentiation of which side may be visible
gl.glShadeModel(GL10.GL_SMOOTH);
gl.glFrontFace(GL10.GL_CCW); // which is the front? the one which is drawn counter clockwise
gl.glCullFace(GL10.GL_BACK); // which one should NOT be drawn
initTriangles();
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
}
public void onDrawFrame(GL10 gl) {
gl.glPushMatrix();
gl.glClearColor(0, 0, 0, 1.0f); //clipping backdrop color
// clear the color buffer to show the ClearColor we called above...
gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
// set rotation
gl.glRotatef(_angleY, 0f, 1f, 0f); //ROLL
gl.glRotatef(_angleX, 1f, 0f, 0f); //ELEVATION
gl.glRotatef(_angleZ, 0f, 0f, 1f); //AZIMUTH
//Draw our pyramid
//4 side faces
gl.glColor4f(0.5f, 0f, 0f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer0);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glColor4f(0.5f, 0.5f, 0f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer1);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glColor4f(0f, 0.5f, 0f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer2);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glColor4f(0f, 0.5f, 0.5f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer3);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
//Base face
gl.glColor4f(0f, 0f, 0.5f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer4);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer5);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glPopMatrix();
}
public void onSurfaceChanged(GL10 gl, int w, int h) {
gl.glViewport(0, 0, w, h);
gl.glViewport(0, 0, w, h);
}
}
Please note that this code does not compensate for tablet default landscape orientation, so it is only expected to work correctly on a phone (I didn't have a tablet close by to test any correction code).
You should probably try a longer delay like SENSOR_DELAY_GAME and/or keep/increase the size of your circular buffer. The sensors (accelerometer, compass, etc.) on mobile devices are inherently noisy, so when I asked about a 'low pass filter', I meant: do you use more data to decrease the frequency of your app's usable updates? Your video was done inside; I would also recommend going to a place with less EM interference, such as a park, just to check that the behavior is consistent, as well as doing the standard compass reset action (rotate the device in a figure-8). In the end you may have to apply some heuristics to throw out the 'bad' data to make a smoother experience for the user.
Well, I had exactly the same problem when I was retrieving the orientation. The thing is that I didn't get it solved (I had to put a constraint on the device position when retrieving it), and I don't know if you'll ever be able to.
Pick up a magnetic compass and try to get the north orientation when the compass is in the situation you describe - you will get the same nonsensical results. So you can't really expect the device's compass to do any better!
A few words about filtering, with your permission.
I would suggest averaging the magnetic field vector itself before turning it into angles.
It is wrong to do averaging/smoothing only on angles without using some sort of magnitude. Angles by themselves do not provide enough data to determine a direction/heading/bearing.
Example: when you want to know the average wind direction over a whole day, you must use the strength of the wind, not only the angles. If you average only the angles you will get an absolutely wrong wind direction.
As for bearing direction, I would use the speed as the magnitude. A minimal sketch of the vector-averaging idea is shown below.
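A minimal sketch of that idea, assuming the raw accelerometer and magnetometer vectors are filtered component-wise before being handed to getRotationMatrix()/getOrientation() (the names and the alpha constant are illustrative):
private final float[] smoothedGravity = new float[3];
private final float[] smoothedGeomagnetic = new float[3];
private static final float ALPHA = 0.2f; // smoothing factor, tune to taste
// Component-wise IIR low-pass; there are no wraparound issues because we
// never average angles, only the raw field/acceleration components.
private void lowPass(float[] input, float[] output) {
    for (int i = 0; i < 3; i++) {
        output[i] += ALPHA * (input[i] - output[i]);
    }
}
// In the sensor callbacks:
//   lowPass(event.values, smoothedGravity);     // Sensor.TYPE_ACCELEROMETER
//   lowPass(event.values, smoothedGeomagnetic); // Sensor.TYPE_MAGNETIC_FIELD
// then feed smoothedGravity / smoothedGeomagnetic into
// SensorManager.getRotationMatrix(...) and SensorManager.getOrientation(...) as before.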

How to implement glColorPointer in AndEngine Rectangle class

Would anyone share an example of how to implement a GradientRectangle that has a different color for each vertex?
I've tried calling glColorPointer from GL10 by passing a float buffer, and from GL11 using an approach similar to the vertices' selectOnHardware approach, but both methods failed for me...
On the AndEngine forum I found this code, but it does not work; however, maybe it will help someone find a better solution.
That example does not work for you because the author has not shown the piece of code responsible for setting the vertices.
Here is my example (it is long, but that's OpenGL...).
NOTE - remember to set up the viewport correctly.
public static void drawGradientRectangle(GL10 gl, float centerX, float centerY,
float width, float height) {
gl.glPushMatrix();
gl.glDisable(GL10.GL_TEXTURE_2D);
gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY); //just in case if you have not done that before
gl.glFrontFace(GL10.GL_CCW); //Set the face
gl.glTranslatef(centerX, centerY, 0);
if (width != 1 || height != 1) {
gl.glScalef(width, height, 1);
}
gl.glVertexPointer(2, GL10.GL_FLOAT, 0, GLDrawConstants.vertexBuffer0_5);
gl.glColorPointer(4, GL10.GL_FLOAT, 0, GLDrawConstants.gradOrangeWhiteBuffer);
// Draw the vertices as triangle strip
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
gl.glDisableClientState(GL10.GL_COLOR_ARRAY);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glPopMatrix();
}
GLDrawConstants class:
public class GLDrawConstants {
public static final FloatBuffer gradOrangeWhiteBuffer;
public static final FloatBuffer vertexBuffer0_5;
private static final float vertices0_5[] = {
-0.5f, -0.5f,// Bottom Left
0.5f, -0.5f,// Bottom right
-0.5f, 0.5f,// Top Left
0.5f, 0.5f// Top Right
};
private static final float gradOrangeWhiteColor[] = {
255/255f, 239/255f, 196/255f, 0f, // Bottom Left
255/255f, 239/255f, 196/255f, 0f, // Bottom right
250/255f, 200/255f, 62/255f, 0.3f, // Top Left
250/255f, 200/255f, 62/255f, 0.3f // Top Right
};
static {
gradOrangeWhiteBuffer = WDUtils.floatBuffer(gradOrangeWhiteColor);
vertexBuffer0_5 = WDUtils.floatBuffer(vertices0_5);
}
}
WDUtils class:
public class WDUtils {
/**
* Make a direct NIO FloatBuffer from an array of floats
*
* @param arr
* The array
* @return The newly created FloatBuffer
*/
public static final FloatBuffer floatBuffer(float[] arr) {
ByteBuffer bb = ByteBuffer.allocateDirect(arr.length * 4);
bb.order(ByteOrder.nativeOrder());
FloatBuffer fb = bb.asFloatBuffer();
fb.put(arr);
fb.position(0);
return fb;
}
}
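A hedged usage sketch: from whatever AndEngine draw hook hands you a GL10 instance (for example an overridden onManagedDraw of an Entity), the call could look like this; the coordinates below are arbitrary:
// gl is the GL10 instance provided by the engine's draw callback
drawGradientRectangle(gl, 240f, 160f, 200f, 50f); // 200x50 rectangle centered at (240, 160)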
Or you can use (or backport) the Gradient class from the GLES2-AnchorCenter branch, which gives you a super easy-to-use and feature-rich API without worrying about OpenGL. =)
