I've been developing a program that renders a number of cubes with desired qualities. However, whenever I try to light a textured cube, the cube becomes very dark. The lighting works well with a non-textured cube, and a simple textured cube without lighting also works, so I believe each piece is implemented properly on its own. There doesn't seem to be much documentation on how to solve this for OpenGL ES 2.0+, though there are a few resources pertaining to older versions.
The following link explains why my cube is behaving as it is, but I'm having trouble translating the solution to a newer version, especially within my shader code, where I'm unsure whether further changes are needed. I am using Android Studio 2.1.3, in case that or its bundled emulators could affect the desired result. If anyone could offer any advice, I'd greatly appreciate it. I also have a separate (large) renderer that calls for the Cube to be drawn; let me know if that code would be helpful in addition to my Cube. Below is my Cube:
public class TexturedLightCube {
/** Cube vertices */
private static final float VERTICES[] = {
-0.3f, -0.3f, -0.3f, //top front right
0.3f, -0.3f, -0.3f, //bottom front right
0.3f, 0.3f, -0.3f, //bottom front left
-0.3f, 0.3f, -0.3f, //top front left
-0.3f, -0.3f, 0.3f, //top back right
0.3f, -0.3f, 0.3f, //bottom back right
0.3f, 0.3f, 0.3f, //bottom back left
-0.3f, 0.3f, 0.3f // top back left
};
/** Vertex colors. */
private static final float COLORS[] = {
0.0f, 1.0f, 1.0f, 1.0f,
1.0f, 0.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f, 1.0f,
0.0f, 1.0f, 0.0f, 1.0f,
0.0f, 0.0f, 1.0f, 1.0f,
1.0f, 0.0f, 1.0f, 1.0f,
1.0f, 1.0f, 1.0f, 1.0f,
0.0f, 1.0f, 1.0f, 1.0f,
};
/** Order to draw vertices as triangles. */
private static final byte INDICES[] = {
0, 1, 3, 3, 1, 2, // Front face.
0, 1, 4, 4, 5, 1, // Bottom face.
1, 2, 5, 5, 6, 2, // Right face.
2, 3, 6, 6, 7, 3, // Top face.
3, 7, 4, 4, 3, 0, // Left face.
4, 5, 7, 7, 6, 5, // Rear face.
};
private static final float TEXTURECOORDS[] =
{
0.0f, 1.0f, //left-bottom
0.0f, 0.0f, //right bottom
1.0f, 0.0f, //left top
1.0f, 1.0f, //right top
0.0f, 1.0f, //left-bottom
0.0f, 0.0f, //right bottom
1.0f, 0.0f, //left top
1.0f, 1.0f, //right top
};
private static final float NORMALS[] = {
//set all normals to all light for testing
1.0f, 1.0f, 1.0f, //top front right
1.0f, 0.0f, 1.0f, //bottom front right
0.0f, 0.0f, 1.0f, //bottom front left
0.0f, 1.0f, 1.0f, //top front left
1.0f, 1.0f, 0.0f, //top back right
1.0f, 0.0f, 0.0f, //bottom back right
0.0f, 0.0f, 0.0f, //bottom back left
0.0f, 1.0f, 0.0f //top back left
};
static final int COORDS_PER_VERTEX = 3;
private static final int VALUES_PER_COLOR = 4;
/** Vertex size in bytes. */
final int VERTEX_STRIDE = COORDS_PER_VERTEX * 4;
/** Color size in bytes. */
private final int COLOR_STRIDE = VALUES_PER_COLOR * 4;
/** Shader code for the vertex. */
private static final String VERTEX_SHADER_CODE =
"uniform mat4 uMVPMatrix;" +
"uniform mat4 uMVMatrix;" +
"uniform vec3 u_LightPos;" +
"attribute vec4 vPosition;" +
"attribute vec4 a_Color;" +
"attribute vec3 a_Normal;" +
"varying vec4 v_Color;" +
"attribute vec2 a_TexCoordinate;" +
"varying vec2 v_TexCoordinate;" +
"void main() {" +
"vec3 modelViewVertex = vec3(uMVMatrix * vPosition);"+
"vec3 modelViewNormal = vec3(uMVMatrix * vec4(a_Normal, 0.0));" +
"float distance = length(u_LightPos - modelViewVertex);" +
"vec3 lightVector = normalize(u_LightPos - modelViewVertex);" +
"float diffuse = max(dot(modelViewNormal, lightVector), 0.1);" +
"diffuse = diffuse * (1.0/(1.0 + (0.00000000000002 * distance * distance)));" + //attenuation factor
"v_Color = a_Color * a_Color * diffuse;" +
"gl_Position = uMVPMatrix * vPosition;" +
"v_TexCoordinate = a_TexCoordinate;" +
"}";
/** Shader code for the fragment. */
private static final String FRAGMENT_SHADER_CODE =
"precision mediump float;" +
"varying vec4 v_Color;" +
"uniform sampler2D u_Texture;"+ //The input texture
"varying vec2 v_TexCoordinate;" +
"void main() {" +
" gl_FragColor = v_Color * texture2D(u_Texture, v_TexCoordinate) ;" + //still works with just color
"}";
private int mTextureUniformHandle; //Pass in texture.
private int mTextureCoordinateHandle; //Pass in model texture coordinate information.
private final int mTextureCoordinateDataSize = 2; //Size of texture coordinate data in elements
public static int mTextureDataHandle; //Handle to texturedata;
private final FloatBuffer mTextureBuffer; //Store model data in float buffer.
private final FloatBuffer mVertexBuffer;
private final FloatBuffer mColorBuffer;
private final FloatBuffer mNormalBuffer;
private final ByteBuffer mIndexBuffer;
private final int mProgram;
private final int mPositionHandle;
private final int mColorHandle;
private final int mMVPMatrixHandle;
private final int mNormalHandle;
public static int mLightPosHandle;
public final int mMVMatrixHandle;
public static int loadTexture(final Context context, final int resourceId) {
//Get the texture from the Android resource directory
final int[] textureHandle = new int[1];
InputStream is = context.getResources().openRawResource(R.drawable.teneighty);
Bitmap bitmap = null;
try {
//BitmapFactory is an Android graphics utility for images
bitmap = BitmapFactory.decodeStream(is);
} finally {
//Always clear and close
try {
is.close();
is = null;
} catch (IOException e) {
}
}
//Generate one texture pointer...
GLES20.glGenTextures(1, textureHandle, 0);
//and bind it to our array.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
//Create Nearest Filtered Texture.
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
//Accounting for different texture parameters.
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
//Use the Android GLUtils to specify a two-dimensional texture image from our bitmap.
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
//Clean up
bitmap.recycle();
if (textureHandle[0] == 0)
{
throw new RuntimeException("Error loading texture");
}
return textureHandle[0];
}
public TexturedLightCube() {
ByteBuffer byteBuffer = ByteBuffer.allocateDirect(VERTICES.length * 4);
byteBuffer.order(ByteOrder.nativeOrder());
mVertexBuffer = byteBuffer.asFloatBuffer();
mVertexBuffer.put(VERTICES);
mVertexBuffer.position(0);
byteBuffer = ByteBuffer.allocateDirect(COLORS.length * 4);
byteBuffer.order(ByteOrder.nativeOrder());
mColorBuffer = byteBuffer.asFloatBuffer();
mColorBuffer.put(COLORS);
mColorBuffer.position(0);
byteBuffer = ByteBuffer.allocateDirect(NORMALS.length * 4);
byteBuffer.order(ByteOrder.nativeOrder());
mNormalBuffer = byteBuffer.asFloatBuffer();
mNormalBuffer.put(NORMALS);
mNormalBuffer.position(0);
byteBuffer = ByteBuffer.allocateDirect(TEXTURECOORDS.length * 4);
byteBuffer.order(ByteOrder.nativeOrder());
mTextureBuffer = byteBuffer.asFloatBuffer();
mTextureBuffer.put(TEXTURECOORDS);
mTextureBuffer.position(0);
mIndexBuffer = ByteBuffer.allocateDirect(INDICES.length);
mIndexBuffer.put(INDICES);
mIndexBuffer.position(0);
mProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(mProgram, loadShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_CODE));
GLES20.glAttachShader(mProgram, loadShader(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_CODE));
GLES20.glLinkProgram(mProgram);
mTextureDataHandle = GLES20.glGetUniformLocation(mProgram, "u_Texture");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgram, "a_TexCoordinate");
mTextureUniformHandle = GLES20.glGetUniformLocation(mProgram, "u_Texture");
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
mMVMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVMatrix");
mLightPosHandle = GLES20.glGetUniformLocation(mProgram, "u_LightPos");
mNormalHandle = GLES20.glGetAttribLocation(mProgram, "a_Normal");
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
mColorHandle = GLES20.glGetAttribLocation(mProgram, "a_Color");
}
/**
* Encapsulates the OpenGL ES instructions for drawing this shape.
*
* @param mvpMatrix The Model View Projection matrix in which to draw this shape
*/
public void draw(float[] mvpMatrix) {
// Add program to OpenGL environment.
GLES20.glUseProgram(mProgram);
//set active texture unit to texture unit 0.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
// Prepare the cube coordinate data.
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, VERTEX_STRIDE, mVertexBuffer);
// Prepare the cube color data.
GLES20.glEnableVertexAttribArray(mColorHandle);
GLES20.glVertexAttribPointer(mColorHandle, 4, GLES20.GL_FLOAT, false, COLOR_STRIDE, mColorBuffer);
//Will have the same size as Vertex as we are implementing per vertex lighting
GLES20.glEnableVertexAttribArray(mNormalHandle);
GLES20.glVertexAttribPointer(mNormalHandle, 3, GLES20.GL_FLOAT, false, VERTEX_STRIDE, mNormalBuffer);
// Prepare the cube texture data.
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
//Pass texture coordinate information.
GLES20.glVertexAttribPointer(mTextureCoordinateHandle,4, GLES20.GL_FLOAT, false, mTextureCoordinateDataSize, mTextureBuffer);
// Apply the projection and view transformation.
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
GLES20.glUniform3f(LightCube.mLightPosHandle, MyGLRenderer.mLightPosInEyeSpace[0], MyGLRenderer.mLightPosInEyeSpace[1], MyGLRenderer.mLightPosInEyeSpace[2]);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glUniform1i(mTextureUniformHandle, 0);
// Draw the cube.
GLES20.glDrawElements(GLES20.GL_TRIANGLES, INDICES.length, GLES20.GL_UNSIGNED_BYTE, mIndexBuffer); //-removed indices-
// Disable vertex arrays.
GLES20.glDisableVertexAttribArray(mPositionHandle);
GLES20.glDisableVertexAttribArray(mTextureCoordinateHandle);
GLES20.glDisableVertexAttribArray(mColorHandle);
GLES20.glDisableVertexAttribArray(mNormalHandle);
}
/** Loads the provided shader in the program. */
private static int loadShader(int type, String shaderCode){
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
You're missing an ambient component to your lighting, which emulates second order (and higher) reflections you would get in real life, but can't get directly in a rasterizer.
Not sure why you are squaring a_Color in your vertex shader. This will definitely make things darker because all values are between 0 and 1; e.g. 0.1^2 == 0.01.
Remember that your dot product might be negative, so you want to clamp out negative diffuse components (e.g. no light intensity on surfaces which are facing away from the light).
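Putting those three points together, here is a minimal sketch of how the lighting lines in the vertex shader could look; the 0.3 ambient level and the 0.25 attenuation coefficient are illustrative values, not taken from your code:
float diffuse = max(dot(modelViewNormal, lightVector), 0.0); //clamp out negative contributions
diffuse = diffuse * (1.0 / (1.0 + (0.25 * distance * distance))); //attenuation with a coefficient that actually matters
float ambient = 0.3; //ambient floor so faces away from the light are not pitch black
v_Color = a_Color * min(diffuse + ambient, 1.0); //note: no squaring of a_Color
For what it's worth, the attenuation coefficient in your shader (2e-14) is so small it has no visible effect; the darkening comes from the squared color and the missing ambient term.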
I've spent days searching and trying tutorials without actually getting results, so here I am.
Simply put, I'm trying to animate a collection of objects (in Android Studio) on the screen in 2D, each with independent movement and rotation. However, when I try this, either the object isn't rendered at all, or it renders skewed (as if rotated through the vertical Y axis).
I also know the importance of the order in which objects are drawn (to give the correct Z-ordering appearance); however, I'm at a bit of a loss with the matrix manipulation.
Here is what I have so far:
Main Activity - standard stuff
private GLSurfaceView mGLSurfaceView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mGLSurfaceView = new GLSurfaceView(this);
//check if device supports ES 2.0
final ActivityManager activityManager = (ActivityManager) getSystemService(Context.ACTIVITY_SERVICE);
final ConfigurationInfo configurationInfo = activityManager.getDeviceConfigurationInfo();
final boolean supportsEs2 = configurationInfo.reqGlEsVersion >= 0x20000;
if (supportsEs2) {
//Get the ES2 compatible context
mGLSurfaceView.setEGLContextClientVersion(2);
//set renderer to my renderer below
mGLSurfaceView.setRenderer(new MyGL20Renderer(this));
} else {
//no support
return;
}
//setContentView(R.layout.activity_main);
setContentView(mGLSurfaceView);
}
GL20Renderer class - Notice I'm now just manually adding 2 objects to my collection to render
public class MyGL20Renderer implements GLSurfaceView.Renderer
{
private final Context mActivityContext;
//Matrix Initializations
private final float[] mMVPMatrix = new float[16];
private final float[] mProjMatrix = new float[16];
private final float[] mVMatrix = new float[16];
private float[] mRotationMatrix = new float[16];
private final float[] mRotateMatrix = new float[16];
private final float[] mMoveMatrix = new float[16];
private final float[] mTempMatrix = new float[16];
private final float[] mModelMatrix = new float[16];
private int numObjects = 2;
private ArrayList<Sprite> spriteList = new ArrayList<Sprite>();
//Declare as volatile because we are updating it from another thread
public volatile float mAngle;
//private Triangle triangle;
//private Sprite sprite;
public MyGL20Renderer(final Context activityContext)
{
mActivityContext = activityContext;
}
public void onSurfaceCreated(GL10 unused, EGLConfig config)
{
//Set the background frame color
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
//Set the camera position (View Matrix) //mtx, offset, eyex,y,z, centrex,y,z, upx,y,z
Matrix.setLookAtM(mVMatrix, 0,
0, 0, -1.5f, //Eye XYZ - position eye behind the origin
0f, 0f, -5.0f, //Look XYZ - We are looking toward the distance
0f, 1.0f, 0.0f); //Up XYZ - Up vector - where head would be pointing if holding the camera
//Initialize Shapes
//triangle = new Triangle();
//sprite = new Sprite(mActivityContext);
//Sprite newSprite;
float xMax = 2.0f;
float yMax = 2.0f;
//rand = 0->1
float newX = (new Random().nextFloat() * xMax * 2) - xMax; //2.0f; //-2 -> +2
float newY = (new Random().nextFloat() * yMax * 2) - yMax; //-3 -> +3
float newZ = 0f;
//for (int i=0; i<numObjects; i++) {
//newSprite = new Sprite(mActivityContext);
//spriteList.add(new Sprite(mActivityContext, newX, newY, newZ));
//}
spriteList.add(new Sprite(mActivityContext, -0.0f, -0.0f, 0.0f));
spriteList.add(new Sprite(mActivityContext, +0.5f, -0.5f, 0.0f));
//spriteList.add(new Sprite(mActivityContext, -1.0f, +1.0f, 0.0f));
//spriteList.add(new Sprite(mActivityContext, +1.0f, +1.0f, 0.0f));
}
public void onDrawFrame(GL10 unused)
{
//init
Sprite currSprite;
//Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
//timing
float jFactor = 0.1f;
long time = SystemClock.uptimeMillis() % 10000L;
float angleInDegrees = (360.0f / 1000.0f) * ((int) time) * jFactor;
/*
//number 1
//Matrix.setIdentityM(mModelMatrix, 0);
//currSprite = spriteList.get(0);
//Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
//currSprite.Draw(mModelMatrix);
//number 2
Matrix.setIdentityM(mModelMatrix, 0);
currSprite = spriteList.get(1);
Matrix.translateM(mModelMatrix, 0, 0.0f, -0.1f, 0.0f);
//Matrix.rotateM(mModelMatrix, 0, 90.0f, 1.0f, 0.0f, 0.0f);
Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
currSprite.Draw(mModelMatrix);
//Matrix.translateM(mModelMatrix, 0, 0, 0, 4.0f);
*/
//Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
//zoom out a bit?
Matrix.translateM(mMVPMatrix, 0, 0, 0, 4.0f);
//number 1
//currSprite = spriteList.get(0);
//Matrix.setIdentityM(mMVPMatrix, 0);
//Matrix.rotateM(mMVPMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
//Matrix.translateM(mMVPMatrix, 0, currSprite.coordX, 0.0f, 0.0f);
//currSprite.coordX += 0.01f;
//currSprite.Draw(mMVPMatrix);
//number 2
currSprite = spriteList.get(0);
Matrix.setIdentityM(mMVPMatrix, 0);
Matrix.translateM(mMVPMatrix, 0, 0.0f, 0.0f, 0.0f);
Matrix.rotateM(mMVPMatrix, 0, angleInDegrees, 0.0f, 0.0f, +1.0f);
//float[] mTempMatrix = new float[16];
//mTempMatrix = mModelMatrix.clone();
//Matrix.multiplyMM(mMVPMatrix, 0, mMVPMatrix, 0, mRotateMatrix, 0);
//mTempMatrix = mMVPMatrix.clone();
//Matrix.multiplyMM(mMVPMatrix, 0, mTempMatrix, 0, mModelMatrix, 0);
//Matrix.setIdentityM(mMVPMatrix, 0);
currSprite.Draw(mMVPMatrix);
/*
//Set the camera position (View Matrix) //mtx, offset, eyex,y,z, centrex,y,z, upx,y,z
Matrix.setLookAtM(mVMatrix, 0,
0, 0, -10,
0f, 0f, 0f,
0f, 1.0f, 0.0f);
//Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
//zoom out a bit?
Matrix.translateM(mMVPMatrix, 0, 0, 0, 4.0f);
for (int i=0; i<numObjects; i++) {
//Create a rotation transformation for the triangle
//Matrix.setRotateM(mRotationMatrix, 0, mAngle, 0, 0, -1.0f);
Matrix.setRotateM(mRotationMatrix, 0, 0, 0, 0, -1.0f); //-1.0 = Z, for some reason need this. Grr
//Combine the rotation matrix with the projection and camera view
Matrix.multiplyMM(mMVPMatrix, 0, mRotationMatrix, 0, mMVPMatrix, 0);
//Draw Shape
//triangle.Draw(mMVPMatrix);
//sprite.Draw(mMVPMatrix);
currSprite = spriteList.get(i);
//Move the object to the passed initial coordinates?
//Matrix.translateM(mMVPMatrix, 0, currSprite.coordX, currSprite.coordY, currSprite.coordZ);
currSprite.Draw(mMVPMatrix);
}
*/
}
public void onSurfaceChanged(GL10 unused, int width, int height)
{
GLES20.glViewport(0, 0, width, height);
if (height == 0) {
height = 1; //in case of divide-by-zero errors
}
float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 10.f;
//This Projection Matrix is applied to object coordinates in the onDrawFrame() method
//Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
Matrix.frustumM(mProjMatrix, 0, left, right, bottom, top, near, far);
}
public static int loadShader(int type, String shaderCode)
{
//Create a Vertex Shader Type Or a Fragment Shader Type (GLES20.GL_VERTEX_SHADER OR GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
//Add The Source Code and Compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
Please excuse the commented-out code in onDrawFrame(), where I've been experimenting and failing.
Sprite Class
public class Sprite
{
//Reference to Activity Context
private final Context mActivityContext;
//Added for Textures
private final FloatBuffer mCubeTextureCoordinates;
private int mTextureUniformHandle;
private int mTextureCoordinateHandle;
private final int mTextureCoordinateDataSize = 2;
private int mTextureDataHandle;
private final String vertexShaderCode =
//Test
"attribute vec2 a_TexCoordinate;" +
"varying vec2 v_TexCoordinate;" +
//End Test
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
" gl_Position = vPosition * uMVPMatrix;" +
//Test
"v_TexCoordinate = a_TexCoordinate;" +
//End Test
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
//Test
"uniform sampler2D u_Texture;" +
"varying vec2 v_TexCoordinate;" +
//End Test
"void main() {" +
//"gl_FragColor = vColor;" +
"gl_FragColor = (vColor * texture2D(u_Texture, v_TexCoordinate));" +
"}";
private final int shaderProgram;
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
public float coordX;
public float coordY;
//public float coordZ;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 2;
static float spriteCoords[] = { -0.5f, 0.5f, // top left
-0.5f, -0.5f, // bottom left
0.5f, -0.5f, // bottom right
0.5f, 0.5f }; //top right
private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; //Order to draw vertices
private final int vertexStride = COORDS_PER_VERTEX * 4; //Bytes per vertex
// Set color with red, green, blue and alpha (opacity) values
//float color[] = { 0.63671875f, 0.76953125f, 0.22265625f, 1.0f };
float color[] = { 1f, 1f, 1f, 1.0f };
public Sprite(final Context activityContext, float initX, float initY, float initZ)
{
mActivityContext = activityContext;
this.coordX = initX;
this.coordY = initY;
//this.coordZ = initZ;
//ergh - will do manually for now. Paxo n00b
//just a 2D array, no need for Z nonsense
for (int i=0; i<spriteCoords.length; i++) {
spriteCoords[i] -= (i%2==0) ? coordX : coordY; //- works better than +
}
//float newPosMatrix[] = { initX, initY, 0f };
//adjust the vector coords accordingly
//Matrix.multiplyMV(spriteCoords, 0, newPosMatrix, 0, spriteCoords, 0);
//Initialize Vertex Byte Buffer for Shape Coordinates / # of coordinate values * 4 bytes per float
ByteBuffer bb = ByteBuffer.allocateDirect(spriteCoords.length * 4);
//Use the Device's Native Byte Order
bb.order(ByteOrder.nativeOrder());
//Create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
//Add the coordinates to the FloatBuffer
vertexBuffer.put(spriteCoords);
//Set the Buffer to Read the first coordinate
vertexBuffer.position(0);
// S, T (or X, Y)
// Texture coordinate data.
// Because images have a Y axis pointing downward (values increase as you move down the image) while
// OpenGL has a Y axis pointing upward, we adjust for that here by flipping the Y axis.
// What's more is that the texture coordinates are the same for every face.
final float[] cubeTextureCoordinateData =
{
//Front face
/*0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
1.0f, 0.0f*/
/*-0.5f, 0.5f,
-0.5f, -0.5f,
0.5f, -0.5f,
0.5f, 0.5f*/
0f, 1f,
0f, 0f,
1f, 0f,
1f, 1f
};
mCubeTextureCoordinates = ByteBuffer.allocateDirect(cubeTextureCoordinateData.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
mCubeTextureCoordinates.put(cubeTextureCoordinateData).position(0);
//Initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(spriteCoords.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
int vertexShader = MyGL20Renderer.loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = MyGL20Renderer.loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
shaderProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(shaderProgram, vertexShader);
GLES20.glAttachShader(shaderProgram, fragmentShader);
//Texture Code
GLES20.glBindAttribLocation(shaderProgram, 0, "a_TexCoordinate");
GLES20.glLinkProgram(shaderProgram);
//Load the texture
mTextureDataHandle = loadTexture(mActivityContext, R.drawable.cube);
}
public void Draw(float[] mvpMatrix)
{
//Add program to OpenGL ES Environment
GLES20.glUseProgram(shaderProgram);
//Get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(shaderProgram, "vPosition");
//Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
//Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
//Get Handle to Fragment Shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(shaderProgram, "vColor");
//Set the Color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
//Set Texture Handles and bind Texture
mTextureUniformHandle = GLES20.glGetAttribLocation(shaderProgram, "u_Texture");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(shaderProgram, "a_TexCoordinate");
//Set the active texture unit to texture unit 0.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
//Bind the texture to this unit.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
//Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(mTextureUniformHandle, 0);
//Pass in the texture coordinate information
mCubeTextureCoordinates.position(0);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false, 0, mCubeTextureCoordinates);
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
//Get Handle to Shape's Transformation Matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(shaderProgram, "uMVPMatrix");
//Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
//glTranslatef(0f, 0f, 0f);
//Draw the triangle
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
//Disable Vertex Array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
public static int loadTexture(final Context context, final int resourceId)
{
final int[] textureHandle = new int[1];
GLES20.glGenTextures(1, textureHandle, 0);
if (textureHandle[0] != 0)
{
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false; // No pre-scaling
// Read in the resource
final Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
// Bind to the texture in OpenGL
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
// Set filtering
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
// Load the bitmap into the bound texture.
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
// Recycle the bitmap, since its data has been loaded into OpenGL.
bitmap.recycle();
}
if (textureHandle[0] == 0)
{
throw new RuntimeException("Error loading texture.");
}
return textureHandle[0];
}
}
Now, I don't know if I'm going about this the right way at all, but I simply want to animate the collection of Sprite objects in spriteList.
More specifically, I want a collection of 3 objects that respond to screen touches and animate towards the touched location (but that will come later).
Initially, I just want to be able to correctly render these objects (with initial locations) and then rotate them about their centre points (about the Z axis).
For some reason, translateM is warping the texture (as if rotating it about the Y axis) rather than actually moving the object in the X/Y plane.
Many thanks for any help you can offer. As you can see, I'm fairly new to OpenGL and have had little luck with the limited tutorials out there that support Android Studio and GLES 2.0.
Kind regards,
James
I think the problem is that you have not multiplied the translation matrices into your rotation matrices; a matrix multiply is required to combine them, as shown below.
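Here is a minimal sketch of how the combination could look in onDrawFrame(), assuming mProjMatrix, mVMatrix, mModelMatrix, currSprite and angleInDegrees are set up as in your renderer (using coordX/coordY for the translation is an illustrative assumption):
//Build the model matrix for this sprite: translate first, then rotate about Z
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, currSprite.coordX, currSprite.coordY, 0f);
Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0f, 0f, 1f);
//Combine into MVP = Projection * View * Model (separate arrays, since multiplyMM
//gives undefined results when the output overlaps an input)
float[] vm = new float[16];
float[] mvp = new float[16];
Matrix.multiplyMM(vm, 0, mVMatrix, 0, mModelMatrix, 0);
Matrix.multiplyMM(mvp, 0, mProjMatrix, 0, vm, 0);
currSprite.Draw(mvp);
Note also that your Sprite vertex shader computes gl_Position = vPosition * uMVPMatrix; with matrices built by Android's Matrix class, the conventional order is uMVPMatrix * vPosition, and reversing the operands effectively transposes the transform, which can produce exactly the kind of Y-axis skew you describe.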
I'm creating a 3D compass application.
I'm using the getOrientation method to get the orientation (almost the same implementation as here). If I place the phone on the table it works well, but when the top of the phone points to the sky (the minus Z axis in the picture; the sphere is the Earth), getOrientation starts giving really bad results: the Z-axis value swings between 0 and 180 degrees within a few real degrees of movement. Is there any way to suppress this behavior? I created a little video that describes the problem (sorry for the bad quality). Thanks in advance.
Solution:
When rotating the model, there is a difference between applying:
gl.glRotatef(_angleY, 0f, 1f, 0f); //ROLL
gl.glRotatef(_angleX, 1f, 0f, 0f); //ELEVATION
gl.glRotatef(_angleZ, 0f, 0f, 1f); //AZIMUTH
and applying:
gl.glRotatef(_angleX, 1f, 0f, 0f); //ELEVATION
gl.glRotatef(_angleY, 0f, 1f, 0f); //ROLL
gl.glRotatef(_angleZ, 0f, 0f, 1f); //AZIMUTH
Rotations do not commute, so the order of the three glRotatef calls changes the final orientation.
Well, I can see at least one problem with this approach of yours.
I assume that you combine the 3D vector from your magnetometer with an averaging low-pass filter to smooth the data. Although such an approach works great for sensor values that vary without discontinuities, such as raw data from the accelerometer, it doesn't work so well verbatim with the angular variables fetched from your magnetometer. Why, one might ask?
Because those angular variables (azimuth, pitch, roll) have an upper bound and a lower bound, any value above 180 degrees, say 181 degrees, wraps around to 181-360 = -179 degrees, and any value below -180 degrees wraps around in the other direction. So when one of those angular variables gets close to a threshold (180 or -180), it tends to oscillate between values near the two extremes. When you blindly apply a low-pass filter to those values, you get either a smooth decrease from 180 degrees towards -180 degrees, or a smooth increase from -180 towards 180 degrees. Either way, the result would look quite like your video above. As long as one directly applies an averaging buffer to the raw angle data from getOrientation(...), this problem will be present (and not only when the phone is upright, but also whenever the azimuth angle wraps around; maybe you could test for those bugs as well).
You say that you tested this with a buffer size of 1. Theoretically, the problem should not be present if there is no averaging at all, although in some implementations of a circular buffer I've seen in the past, that could mean there is still averaging done with at least one past value, not that there is no averaging at all. If this is your case, we have found the root cause of your bug.
Unfortunately, there isn't much of an elegant solution that could be implemented while sticking with your standard averaging filter. What I usually do in this case is switch to another type of low pass filter, which doesn't need any deep buffer to operate: a simple IIR filter (order 1):
diff = x[n] - y[n-1]
y[n] - y[n-1] = alpha * (x[n] - y[n-1]) = alpha * diff
...where y is the filtered angle, x is the raw angle, and alpha<1 is analogous to a time constant, as alpha=1 corresponds to the no-filter case, and the frequency cutoff of the low-pass filter gets lowered as alpha approaches zero. An acute eye would probably have noticed by now that this corresponds to a simple Proportional Controller.
Such a filter allows the compensation of the wraparound of the angle value because we can add or subtract 360 to diff so as to ensure that abs(diff)<=180, which in turn ensures that the filtered angle value will always increase/decrease in the optimal direction to reach its "setpoint".
An example function call, which is to be scheduled periodically, that calculates a filtered angle value y for a given raw angle value x, could be something like this:
private float restrictAngle(float tmpAngle){
while(tmpAngle>=180) tmpAngle-=360;
while(tmpAngle<-180) tmpAngle+=360;
return tmpAngle;
}
//x is a raw angle value from getOrientation(...)
//y is the current filtered angle value
private float calculateFilteredAngle(float x, float y){
final float alpha = 0.1f;
float diff = x-y;
//here, we ensure that abs(diff)<=180
diff = restrictAngle(diff);
y += alpha*diff;
//ensure that y stays within [-180, 180[ bounds
y = restrictAngle(y);
return y;
}
The function calculateFilteredAngle(float x, float y) can then be called periodically using something like this (example for azimuth angle from getOrientation(...) function:
filteredAzimuth = calculateFilteredAngle(azimuth, filteredAzimuth);
Using this method, the filter would not misbehave like the averaging filter as mentioned by the OP.
As I could not load the .apk uploaded by the OP, I decided to implement my own test project in order to see if the corrections work. Here is the entire code (it does not use an XML file for the main layout, so there is none to include). Simply copy it into a test project to see if it works on a specific device (tested functional on an HTC Desire with Android 2.1):
File 1: Compass3DActivity.java:
package com.epichorns.compass3D;
import android.app.Activity;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Bundle;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;
public class Compass3DActivity extends Activity {
//Textviews for showing angle data
TextView mTextView_azimuth;
TextView mTextView_pitch;
TextView mTextView_roll;
TextView mTextView_filtered_azimuth;
TextView mTextView_filtered_pitch;
TextView mTextView_filtered_roll;
float mAngle0_azimuth=0;
float mAngle1_pitch=0;
float mAngle2_roll=0;
float mAngle0_filtered_azimuth=0;
float mAngle1_filtered_pitch=0;
float mAngle2_filtered_roll=0;
private Compass3DView mCompassView;
private SensorManager sensorManager;
//sensor calculation values
float[] mGravity = null;
float[] mGeomagnetic = null;
float Rmat[] = new float[9];
float Imat[] = new float[9];
float orientation[] = new float[3];
SensorEventListener mAccelerometerListener = new SensorEventListener(){
public void onAccuracyChanged(Sensor sensor, int accuracy) {}
public void onSensorChanged(SensorEvent event) {
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER){
mGravity = event.values.clone();
processSensorData();
}
}
};
SensorEventListener mMagnetometerListener = new SensorEventListener(){
public void onAccuracyChanged(Sensor sensor, int accuracy) {}
public void onSensorChanged(SensorEvent event) {
if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD){
mGeomagnetic = event.values.clone();
processSensorData();
update();
}
}
};
private float restrictAngle(float tmpAngle){
while(tmpAngle>=180) tmpAngle-=360;
while(tmpAngle<-180) tmpAngle+=360;
return tmpAngle;
}
//x is a raw angle value from getOrientation(...)
//y is the current filtered angle value
private float calculateFilteredAngle(float x, float y){
final float alpha = 0.3f;
float diff = x-y;
//here, we ensure that abs(diff)<=180
diff = restrictAngle(diff);
y += alpha*diff;
//ensure that y stays within [-180, 180[ bounds
y = restrictAngle(y);
return y;
}
public void processSensorData(){
if (mGravity != null && mGeomagnetic != null) {
boolean success = SensorManager.getRotationMatrix(Rmat, Imat, mGravity, mGeomagnetic);
if (success) {
SensorManager.getOrientation(Rmat, orientation);
mAngle0_azimuth = (float)Math.toDegrees((double)orientation[0]); // orientation contains: azimuth, pitch and roll
mAngle1_pitch = (float)Math.toDegrees((double)orientation[1]); //pitch
mAngle2_roll = -(float)Math.toDegrees((double)orientation[2]); //roll
mAngle0_filtered_azimuth = calculateFilteredAngle(mAngle0_azimuth, mAngle0_filtered_azimuth);
mAngle1_filtered_pitch = calculateFilteredAngle(mAngle1_pitch, mAngle1_filtered_pitch);
mAngle2_filtered_roll = calculateFilteredAngle(mAngle2_roll, mAngle2_filtered_roll);
}
mGravity = null; //force a full new refresh
mGeomagnetic = null; //force a full new refresh
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
LinearLayout ll = new LinearLayout(this);
LinearLayout.LayoutParams llParams = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.FILL_PARENT, LinearLayout.LayoutParams.FILL_PARENT);
ll.setLayoutParams(llParams);
ll.setOrientation(LinearLayout.VERTICAL);
ViewGroup.LayoutParams txtParams = new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
mTextView_azimuth = new TextView(this);
mTextView_azimuth.setLayoutParams(txtParams);
mTextView_pitch = new TextView(this);
mTextView_pitch.setLayoutParams(txtParams);
mTextView_roll = new TextView(this);
mTextView_roll.setLayoutParams(txtParams);
mTextView_filtered_azimuth = new TextView(this);
mTextView_filtered_azimuth.setLayoutParams(txtParams);
mTextView_filtered_pitch = new TextView(this);
mTextView_filtered_pitch.setLayoutParams(txtParams);
mTextView_filtered_roll = new TextView(this);
mTextView_filtered_roll.setLayoutParams(txtParams);
mCompassView = new Compass3DView(this);
ViewGroup.LayoutParams compassParams = new ViewGroup.LayoutParams(200,200);
mCompassView.setLayoutParams(compassParams);
ll.addView(mCompassView);
ll.addView(mTextView_azimuth);
ll.addView(mTextView_pitch);
ll.addView(mTextView_roll);
ll.addView(mTextView_filtered_azimuth);
ll.addView(mTextView_filtered_pitch);
ll.addView(mTextView_filtered_roll);
setContentView(ll);
sensorManager = (SensorManager) this.getSystemService(Context.SENSOR_SERVICE);
sensorManager.registerListener(mAccelerometerListener, sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), SensorManager.SENSOR_DELAY_UI);
sensorManager.registerListener(mMagnetometerListener, sensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD), SensorManager.SENSOR_DELAY_UI);
update();
}
@Override
public void onDestroy(){
super.onDestroy();
sensorManager.unregisterListener(mAccelerometerListener);
sensorManager.unregisterListener(mMagnetometerListener);
}
private void update(){
mCompassView.changeAngles(mAngle1_filtered_pitch, mAngle2_filtered_roll, mAngle0_filtered_azimuth);
mTextView_azimuth.setText("Azimuth: "+String.valueOf(mAngle0_azimuth));
mTextView_pitch.setText("Pitch: "+String.valueOf(mAngle1_pitch));
mTextView_roll.setText("Roll: "+String.valueOf(mAngle2_roll));
mTextView_filtered_azimuth.setText("Azimuth: "+String.valueOf(mAngle0_filtered_azimuth));
mTextView_filtered_pitch.setText("Pitch: "+String.valueOf(mAngle1_filtered_pitch));
mTextView_filtered_roll.setText("Roll: "+String.valueOf(mAngle2_filtered_roll));
}
}
File 2: Compass3DView.java:
package com.epichorns.compass3D;
import android.content.Context;
import android.opengl.GLSurfaceView;
public class Compass3DView extends GLSurfaceView {
private Compass3DRenderer mRenderer;
public Compass3DView(Context context) {
super(context);
mRenderer = new Compass3DRenderer(context);
setRenderer(mRenderer);
}
public void changeAngles(float angle0, float angle1, float angle2){
mRenderer.setAngleX(angle0);
mRenderer.setAngleY(angle1);
mRenderer.setAngleZ(angle2);
}
}
File 3: Compass3DRenderer.java:
package com.epichorns.compass3D;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.opengl.GLSurfaceView;
public class Compass3DRenderer implements GLSurfaceView.Renderer {
Context mContext;
// a raw buffer to hold indices
ShortBuffer _indexBuffer;
// raw buffers to hold the vertices
FloatBuffer _vertexBuffer0;
FloatBuffer _vertexBuffer1;
FloatBuffer _vertexBuffer2;
FloatBuffer _vertexBuffer3;
FloatBuffer _vertexBuffer4;
FloatBuffer _vertexBuffer5;
int _numVertices = 3; //standard triangle vertices = 3
FloatBuffer _textureBuffer0123;
//private FloatBuffer _light0Position;
//private FloatBuffer _light0Ambient;
float _light0Position[] = new float[]{10.0f, 10.0f, 10.0f, 0.0f};
float _light0Ambient[] = new float[]{0.05f, 0.05f, 0.05f, 1.0f};
float _light0Diffuse[] = new float[]{0.5f, 0.5f, 0.5f, 1.0f};
float _light0Specular[] = new float[]{0.7f, 0.7f, 0.7f, 1.0f};
float _matAmbient[] = new float[] { 0.6f, 0.6f, 0.6f, 1.0f };
float _matDiffuse[] = new float[] { 0.6f, 0.6f, 0.6f, 1.0f };
private float _angleX=0f;
private float _angleY=0f;
private float _angleZ=0f;
Compass3DRenderer(Context context){
super();
mContext = context;
}
public void setAngleX(float angle) {
_angleX = angle;
}
public void setAngleY(float angle) {
_angleY = angle;
}
public void setAngleZ(float angle) {
_angleZ = angle;
}
FloatBuffer InitFloatBuffer(float[] src){
ByteBuffer bb = ByteBuffer.allocateDirect(4*src.length);
bb.order(ByteOrder.nativeOrder());
FloatBuffer inBuf = bb.asFloatBuffer();
inBuf.put(src);
return inBuf;
}
ShortBuffer InitShortBuffer(short[] src){
ByteBuffer bb = ByteBuffer.allocateDirect(2*src.length);
bb.order(ByteOrder.nativeOrder());
ShortBuffer inBuf = bb.asShortBuffer();
inBuf.put(src);
return inBuf;
}
//Init data for our rendered pyramid
private void initTriangles() {
//Side faces triangles
float[] coords = {
-0.25f, -0.5f, 0.25f,
0.25f, -0.5f, 0.25f,
0f, 0.5f, 0f
};
float[] coords1 = {
0.25f, -0.5f, 0.25f,
0.25f, -0.5f, -0.25f,
0f, 0.5f, 0f
};
float[] coords2 = {
0.25f, -0.5f, -0.25f,
-0.25f, -0.5f, -0.25f,
0f, 0.5f, 0f
};
float[] coords3 = {
-0.25f, -0.5f, -0.25f,
-0.25f, -0.5f, 0.25f,
0f, 0.5f, 0f
};
//Base triangles
float[] coords4 = {
-0.25f, -0.5f, 0.25f,
0.25f, -0.5f, -0.25f,
0.25f, -0.5f, 0.25f
};
float[] coords5 = {
-0.25f, -0.5f, 0.25f,
-0.25f, -0.5f, -0.25f,
0.25f, -0.5f, -0.25f
};
float[] textures0123 = {
// Mapping coordinates for the vertices (UV mapping CW)
0.0f, 0.0f, // bottom left
1.0f, 0.0f, // bottom right
0.5f, 1.0f, // top ctr
};
_vertexBuffer0 = InitFloatBuffer(coords);
_vertexBuffer0.position(0);
_vertexBuffer1 = InitFloatBuffer(coords1);
_vertexBuffer1.position(0);
_vertexBuffer2 = InitFloatBuffer(coords2);
_vertexBuffer2.position(0);
_vertexBuffer3 = InitFloatBuffer(coords3);
_vertexBuffer3.position(0);
_vertexBuffer4 = InitFloatBuffer(coords4);
_vertexBuffer4.position(0);
_vertexBuffer5 = InitFloatBuffer(coords5);
_vertexBuffer5.position(0);
_textureBuffer0123 = InitFloatBuffer(textures0123);
_textureBuffer0123.position(0);
short[] indices = {0, 1, 2};
_indexBuffer = InitShortBuffer(indices);
_indexBuffer.position(0);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
gl.glEnable(GL10.GL_CULL_FACE); // enable the differentiation of which side may be visible
gl.glShadeModel(GL10.GL_SMOOTH);
gl.glFrontFace(GL10.GL_CCW); // which is the front? the one which is drawn counter clockwise
gl.glCullFace(GL10.GL_BACK); // which one should NOT be drawn
initTriangles();
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
}
public void onDrawFrame(GL10 gl) {
gl.glPushMatrix();
gl.glClearColor(0, 0, 0, 1.0f); //clipping backdrop color
// clear the color buffer to show the ClearColor we called above...
gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
// set rotation
gl.glRotatef(_angleY, 0f, 1f, 0f); //ROLL
gl.glRotatef(_angleX, 1f, 0f, 0f); //ELEVATION
gl.glRotatef(_angleZ, 0f, 0f, 1f); //AZIMUTH
//Draw our pyramid
//4 side faces
gl.glColor4f(0.5f, 0f, 0f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer0);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glColor4f(0.5f, 0.5f, 0f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer1);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glColor4f(0f, 0.5f, 0f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer2);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glColor4f(0f, 0.5f, 0.5f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer3);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
//Base face
gl.glColor4f(0f, 0f, 0.5f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer4);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer5);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glPopMatrix();
}
public void onSurfaceChanged(GL10 gl, int w, int h) {
gl.glViewport(0, 0, w, h);
}
}
Please note that this code does not compensate for tablet default landscape orientation, so it is only expected to work correctly on a phone (I didn't have a tablet close by to test any correction code).
You should probably try a different sensor delay such as SENSOR_DELAY_GAME, and/or keep or increase the size of your circular buffer. The sensors (accelerometer, compass, etc.) on mobile devices are inherently noisy, so when I asked about a 'low-pass filter', I meant: do you use more data to decrease the frequency of your app's usable updates? Your video was done inside; I would also recommend going to a place with less EM interference, such as a park, just to check that the behavior is consistent, as well as performing the standard compass reset action (rotate the device in a figure-8). In the end you may have to apply some heuristics to throw out the 'bad' data to make a smoother experience for the user.
Well, I had exactly the same problem when retrieving orientation. The thing is, I never got it solved (I had to impose a constraint on the device's position while retrieving it), and I don't know if you'll ever be able to.
Pick up a magnetic compass and try to get a north reading while the compass is in the situation you describe - you will get the same nonsensical results. So you can't really expect the device's compass to do any better!
A few words about filtering, with your permission.
I would suggest averaging the magnetic field vector itself before turning it into angles.
It is wrong to average or smooth only the angles without using some sort of magnitude; angles alone do not provide enough data to determine direction/heading/bearing.
Example: when you want to know the average wind direction over a whole day, you must use the strength of the wind, not just the angles. If you average only the angles, you will get an absolutely wrong wind direction.
As for bearing, I would use the speed as the magnitude.
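A minimal sketch of what averaging the raw vector (rather than the angles) could look like on Android, before the result is handed to SensorManager.getRotationMatrix(...); the buffer size N and the field names here are illustrative assumptions, not from the original post:
// Keep the last N raw magnetometer vectors and average them component-wise;
// the averaged vector (not the angles) then feeds getRotationMatrix().
private static final int N = 10;
private final float[][] mMagSamples = new float[N][3];
private int mMagIndex = 0;

private float[] averagedMagnetic(float[] latest) {
    mMagSamples[mMagIndex] = latest.clone(); // store the newest sample
    mMagIndex = (mMagIndex + 1) % N;         // advance the ring buffer
    float[] avg = new float[3];
    for (float[] s : mMagSamples) {
        avg[0] += s[0]; avg[1] += s[1]; avg[2] += s[2];
    }
    avg[0] /= N; avg[1] /= N; avg[2] /= N;
    return avg; // early results are biased toward zero until the buffer fills
}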
I'm trying to learn how to use the LWJGL libraries. It took me forever to find lessons. I eventually stumbled upon the NeHe lessons that teach OpenGL across many programming languages. I downloaded the LWJGL version of Lesson07 and noticed it was using DevIL for the images.
I asked a question on here earlier about what I should use instead of DevIL, and an informative user suggested that I go with Slick instead. I got past Lesson06 after tweaking it for a while, but now I am seriously stuck on Lesson07.
Specifically here:
private int[] loadTexture(String path) {
IntBuffer image = ByteBuffer.allocateDirect(4).order(ByteOrder.nativeOrder()).asIntBuffer();
IL.ilGenImages(1, image);
IL.ilBindImage(image.get(0));
IL.ilLoadImage(path);
IL.ilConvertImage(IL.IL_RGB, IL.IL_BYTE);
ByteBuffer scratch = ByteBuffer.allocateDirect(IL.ilGetInteger(IL.IL_IMAGE_WIDTH) * IL.ilGetInteger(IL.IL_IMAGE_HEIGHT) * 3);
IL.ilCopyPixels(0, 0, 0, IL.ilGetInteger(IL.IL_IMAGE_WIDTH), IL.ilGetInteger(IL.IL_IMAGE_HEIGHT), 1, IL.IL_RGB, IL.IL_BYTE, scratch);
// Create A IntBuffer For Image Address In Memory
IntBuffer buf = ByteBuffer.allocateDirect(12).order(ByteOrder.nativeOrder()).asIntBuffer();
GL11.glGenTextures(buf); // Create Texture In OpenGL
GL11.glBindTexture(GL11.GL_TEXTURE_2D, buf.get(0));
// Typical Texture Generation Using Data From The Image
// Create Nearest Filtered Texture
GL11.glBindTexture(GL11.GL_TEXTURE_2D, buf.get(0));
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MAG_FILTER, GL11.GL_NEAREST);
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MIN_FILTER, GL11.GL_NEAREST);
GL11.glTexImage2D(GL11.GL_TEXTURE_2D, 0, GL11.GL_RGB, IL.ilGetInteger(IL.IL_IMAGE_WIDTH),
IL.ilGetInteger(IL.IL_IMAGE_HEIGHT), 0, GL11.GL_RGB, GL11.GL_UNSIGNED_BYTE, scratch);
// Create Linear Filtered Texture
GL11.glBindTexture(GL11.GL_TEXTURE_2D, buf.get(1));
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MAG_FILTER, GL11.GL_LINEAR);
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MIN_FILTER, GL11.GL_LINEAR);
GL11.glTexImage2D(GL11.GL_TEXTURE_2D, 0, GL11.GL_RGB, IL.ilGetInteger(IL.IL_IMAGE_WIDTH),
IL.ilGetInteger(IL.IL_IMAGE_HEIGHT), 0, GL11.GL_RGB, GL11.GL_UNSIGNED_BYTE, scratch);
// Create MipMapped Texture
GL11.glBindTexture(GL11.GL_TEXTURE_2D, buf.get(2));
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MAG_FILTER, GL11.GL_LINEAR);
GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MIN_FILTER, GL11.GL_LINEAR_MIPMAP_NEAREST);
GLU.gluBuild2DMipmaps(GL11.GL_TEXTURE_2D, 3, IL.ilGetInteger(IL.IL_IMAGE_WIDTH),
IL.ilGetInteger(IL.IL_IMAGE_HEIGHT), GL11.GL_RGB, GL11.GL_UNSIGNED_BYTE, scratch);
return new int[]{ buf.get(0), buf.get(1), buf.get(2) }; // Return Image Addresses In Memory
}
I've tried to get it working and tried substituting Slick code in, yet everything I have tried has either crashed or led to a black screen.
What is the best way to do this with Slick?
This one was rather annoying. I think it was easier to convert than Lesson06, but that could be because I was at least a little more used to it than I was for 06. It still took me all night to get it to work, and I didn't figure it out until morning.
Here's my entire code. The most annoying parts are commented, along with some code left in comments to show what DIDN'T work. Any such commented-out code is probably very fragmented, so I'm not sure you'll understand what I was trying to do, but I left it in anyway. The two places where I got stuck are the two places with helpful comments and links.
package LWJGLTest2;
import java.awt.Image;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.awt.image.DataBufferInt;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.util.glu.GLU.*;
import java.io.IOException;
import java.net.URL;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.imageio.ImageIO;
import javax.swing.ImageIcon;
import org.lwjgl.BufferUtils;
import org.lwjgl.LWJGLException;
import org.lwjgl.input.Keyboard;
import org.lwjgl.input.Mouse;
import org.lwjgl.opengl.Display;
import org.lwjgl.opengl.DisplayMode;
import org.lwjgl.util.glu.GLU;
import org.newdawn.slick.opengl.Texture;
import org.newdawn.slick.opengl.TextureLoader;
import org.newdawn.slick.util.ResourceLoader;
/**
*
* @author Jay
*/
public class LWJGLTest2 {
public static final int WIDTH = 640;
public static final int HEIGHT = 480;
public static final Logger LOGGER = Logger.getLogger(LWJGLTest2.class.getName());
float xrot;
float yrot;
float xspeed;
float yspeed;
float zpos = -5f;
boolean lp;
boolean fp;
boolean light;
final float[] AMBIENT = {.5f, .5f, .5f, 1f};
final float[] DIFFUSE = {1f, 1f, 1f, 1f};
final float[] LPOSITION = {0f, 0f, 2f, 1f};
int filter;
BufferedImage textureImage;
Texture texture[] = new Texture[3];
static {
try {
LOGGER.addHandler(new FileHandler("errors.log",true));
}
catch(IOException ex) {
LOGGER.log(Level.WARNING,ex.toString(),ex);
}
}
public static void main(String[] args) {
LWJGLTest2 main = null;
try {
main = new LWJGLTest2();
main.create();
main.run();
} catch (Exception ex) {
LOGGER.log(Level.SEVERE, ex.toString(), ex);
} finally {
if (main != null) {
main.destroy();
}
}
}
public void create() throws LWJGLException {
// Display
Display.setDisplayMode(new DisplayMode(WIDTH, HEIGHT));
Display.setFullscreen(false);
Display.setTitle("LWJGL Test");
Display.create();
// Keyboard
Keyboard.create();
// Mouse
Mouse.setGrabbed(false);
Mouse.create();
// OpenGL
initGL();
resizeGL();
}
public void destroy() {
Mouse.destroy();
Keyboard.destroy();
Display.destroy();
}
public void initGL() {
try {
loadTextures();
} catch (IOException ex) {
System.err.println(ex); System.exit(0);
}
glEnable(GL_TEXTURE_2D);
glShadeModel(GL_SMOOTH);
glClearColor(0f, 0f, 0f, 0.5f);
glClearDepth(1f);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
//FloatBuffer scratch = BufferUtils.createFloatBuffer(8); // 4 for the values and 4 for the extra buffer
//scratch.put(AMBIENT);
ByteBuffer scratch = ByteBuffer.allocateDirect(16);
scratch.order(ByteOrder.nativeOrder());
//scratch.put(AMBIENT);
glLight(GL_LIGHT1, GL_AMBIENT, (FloatBuffer)scratch.asFloatBuffer().put(AMBIENT).flip()); // 3rd argument used to be only scratch
//scratch = ByteBuffer.allocateDirect(32).asFloatBuffer(); // reset the buffer to prevent an overflow
//scratch.put(DIFFUSE);
glLight(GL_LIGHT1, GL_DIFFUSE, (FloatBuffer)scratch.asFloatBuffer().put(DIFFUSE).flip());
//scratch = ByteBuffer.allocateDirect(32).asFloatBuffer();
//scratch.put(LPOSITION);
glLight(GL_LIGHT1, GL_POSITION, (FloatBuffer)scratch.asFloatBuffer().put(LPOSITION).flip());
glEnable(GL_LIGHT1);
}
public void resizeGL() {
glViewport(0, 0, WIDTH, HEIGHT);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(45f, (float)WIDTH/(float)HEIGHT, 0.1f, 100f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
}
public void run() {
while (!Display.isCloseRequested() && !Keyboard.isKeyDown(Keyboard.KEY_ESCAPE)) {
if (Display.isVisible()) {
checkInput();
render();
Display.update();
}
}
}
public void checkInput() {
if (Keyboard.isKeyDown(Keyboard.KEY_L) && !lp) {
lp = true;
light = !light;
if (light)
glEnable(GL_LIGHTING);
else
glDisable(GL_LIGHTING);
}
if (!Keyboard.isKeyDown(Keyboard.KEY_L))
lp = false;
if (Keyboard.isKeyDown(Keyboard.KEY_F) && !fp) {
fp = true;
filter = (filter + 1) % 3;
}
if (!Keyboard.isKeyDown(Keyboard.KEY_F))
fp = false;
if (Keyboard.isKeyDown(Keyboard.KEY_PRIOR))
zpos -= 0.02f;
if (Keyboard.isKeyDown(Keyboard.KEY_NEXT))
zpos += 0.02f;
if (Keyboard.isKeyDown(Keyboard.KEY_UP))
xspeed -= 0.001f;
if (Keyboard.isKeyDown(Keyboard.KEY_DOWN))
xspeed += 0.001f;
if (Keyboard.isKeyDown(Keyboard.KEY_RIGHT))
yspeed -= 0.001f;
if (Keyboard.isKeyDown(Keyboard.KEY_LEFT))
yspeed += 0.001f;
}
public void render() {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glTranslatef(0f, 0f, zpos);
glRotatef(xrot, 1f, 0f, 0f);
glRotatef(yrot, 0f, 1f, 0f);
texture[filter].bind();
glBegin(GL_QUADS);
// Front Face
glNormal3f(0f, 0f, 1f);
glTexCoord2f(0.0f, 0.0f); glVertex3f(-1.0f, -1.0f, 1.0f); // Bottom Left Of The Texture and Quad
glTexCoord2f(1.0f, 0.0f); glVertex3f( 1.0f, -1.0f, 1.0f); // Bottom Right Of The Texture and Quad
glTexCoord2f(1.0f, 1.0f); glVertex3f( 1.0f, 1.0f, 1.0f); // Top Right Of The Texture and Quad
glTexCoord2f(0.0f, 1.0f); glVertex3f(-1.0f, 1.0f, 1.0f); // Top Left Of The Texture and Quad
// Back Face
glNormal3f(0f, 0f, -1f);
glTexCoord2f(1.0f, 0.0f); glVertex3f(-1.0f, -1.0f, -1.0f); // Bottom Right Of The Texture and Quad
glTexCoord2f(1.0f, 1.0f); glVertex3f(-1.0f, 1.0f, -1.0f); // Top Right Of The Texture and Quad
glTexCoord2f(0.0f, 1.0f); glVertex3f( 1.0f, 1.0f, -1.0f); // Top Left Of The Texture and Quad
glTexCoord2f(0.0f, 0.0f); glVertex3f( 1.0f, -1.0f, -1.0f); // Bottom Left Of The Texture and Quad
// Top Face
glNormal3f(0f, 1f, 0f);
glTexCoord2f(0.0f, 1.0f); glVertex3f(-1.0f, 1.0f, -1.0f); // Top Left Of The Texture and Quad
glTexCoord2f(0.0f, 0.0f); glVertex3f(-1.0f, 1.0f, 1.0f); // Bottom Left Of The Texture and Quad
glTexCoord2f(1.0f, 0.0f); glVertex3f( 1.0f, 1.0f, 1.0f); // Bottom Right Of The Texture and Quad
glTexCoord2f(1.0f, 1.0f); glVertex3f( 1.0f, 1.0f, -1.0f); // Top Right Of The Texture and Quad
// Bottom Face
glNormal3f(0f, -1f, 0f);
glTexCoord2f(1.0f, 1.0f); glVertex3f(-1.0f, -1.0f, -1.0f); // Top Right Of The Texture and Quad
glTexCoord2f(0.0f, 1.0f); glVertex3f( 1.0f, -1.0f, -1.0f); // Top Left Of The Texture and Quad
glTexCoord2f(0.0f, 0.0f); glVertex3f( 1.0f, -1.0f, 1.0f); // Bottom Left Of The Texture and Quad
glTexCoord2f(1.0f, 0.0f); glVertex3f(-1.0f, -1.0f, 1.0f); // Bottom Right Of The Texture and Quad
// Right face
glNormal3f(1f, 0f, 0f);
glTexCoord2f(1.0f, 0.0f); glVertex3f( 1.0f, -1.0f, -1.0f); // Bottom Right Of The Texture and Quad
glTexCoord2f(1.0f, 1.0f); glVertex3f( 1.0f, 1.0f, -1.0f); // Top Right Of The Texture and Quad
glTexCoord2f(0.0f, 1.0f); glVertex3f( 1.0f, 1.0f, 1.0f); // Top Left Of The Texture and Quad
glTexCoord2f(0.0f, 0.0f); glVertex3f( 1.0f, -1.0f, 1.0f); // Bottom Left Of The Texture and Quad
// Left Face
glNormal3f(-1f, 0f, 0f);
glTexCoord2f(0.0f, 0.0f); glVertex3f(-1.0f, -1.0f, -1.0f); // Bottom Left Of The Texture and Quad
glTexCoord2f(1.0f, 0.0f); glVertex3f(-1.0f, -1.0f, 1.0f); // Bottom Right Of The Texture and Quad
glTexCoord2f(1.0f, 1.0f); glVertex3f(-1.0f, 1.0f, 1.0f); // Top Right Of The Texture and Quad
glTexCoord2f(0.0f, 1.0f); glVertex3f(-1.0f, 1.0f, -1.0f); // Top Left Of The Texture and Quad
glEnd();
xrot += xspeed;
yrot += yspeed;
}
public void loadTextures() throws IOException {
//textureImage = ImageIO.read(this.getClass().getResourceAsStream("Crate.png"));
texture[0] = TextureLoader.getTexture("BMP", this.getClass().getResourceAsStream("Crate.bmp"), true, GL_NEAREST);
texture[1] = TextureLoader.getTexture("BMP", this.getClass().getResourceAsStream("Crate.bmp"), true, GL_LINEAR);
texture[2] = TextureLoader.getTexture("BMP", this.getClass().getResourceAsStream("Crate.bmp"), true);
//ByteBuffer scratch = ByteBuffer.wrap(((DataBufferByte)textureImage.getRaster().getDataBuffer()).getData());
//ByteBuffer scratch = ByteBuffer.allocateDirect(((DataBufferByte)textureImage.getRaster().getDataBuffer()).getData().length + 3);
/*
* The +3 in the last statement is important! Without it, you get this:
* SEVERE: java.lang.IllegalArgumentException: Number of remaining buffer
* elements is 0, must be at least 3. Because at most 3 elements can be
* returned, a buffer with at least 3 elements is required, regardless of
* actual returned element count
*/
//scratch.put(((DataBufferByte)textureImage.getRaster().getDataBuffer()).getData()); // copy the raw pixel data
//scratch.rewind(); // rewind so OpenGL reads from the start of the buffer
//for (int i = 0; i < scratch.limit(); ++i)
// System.out.println(scratch);
texture[2].bind();
//ByteBuffer scratch = ByteBuffer.allocateDirect(texture[2].getTextureData().length);
//scratch.order(ByteOrder.nativeOrder());
//glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
//glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_NEAREST);
//GLU.gluBuild2DMipmaps(GL_TEXTURE_2D, 3, (int)texture[2].getWidth(), (int)texture[2].getHeight(), GL_RGB, GL_UNSIGNED_BYTE, (ByteBuffer)scratch.put(texture[2].getTextureData()).flip());
org.lwjgl.opengl.GL30.glGenerateMipmap(GL_TEXTURE_2D);
/* Generates the full mipmap chain for the currently bound texture.
 * See http://slick.javaunlimited.net/viewtopic.php?t=2755
 * I went through a LOT of other options before settling on this,
 * including the trick at
 * http://lwjgl.org/forum/index.php?action=printpage;topic=2233.0
 * (used above, in initGL), but that didn't work either.
 */
}
}
This code has been run and finally works. Using Slick-Util shortens it a lot. You'll also notice that I changed the rate of change for the rotation speeds: 0.01f was far too fast, so I use 0.001f instead. Hope this helps!
Links from my source code:
"Mipmapping"
"Why is adding of the light in lwjgl so difficult?"
You'll probably need to adapt the code to use the standard LWJGL image/texture loading features, at least up to the glBindTexture() call. After that it's all pretty standard OpenGL, so you should be fine.
There is some pretty decent example code here that demonstrates what you need to do:
http://lwjgl.org/wiki/index.php?title=Slick-Util_Library_-_Part_1_-_Loading_Images_for_LWJGL
I think the key lines of code you need are:
import org.newdawn.slick.opengl.Texture;
import org.newdawn.slick.opengl.TextureLoader;
import org.newdawn.slick.util.ResourceLoader;
...
Texture texture = TextureLoader.getTexture("PNG", ResourceLoader.getResourceAsStream("res/image.png"));
...
GL11.glBindTexture(GL11.GL_TEXTURE_2D, texture.getTextureID());
In other words, I think it's actually a bit simpler than using DevIL.
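To make that concrete, here's a minimal, self-contained sketch of the whole load-and-draw path with Slick-Util on LWJGL 2. It's illustrative only: the window size, the res/image.png path, and the quad coordinates are placeholders, not anything from the original post.
import org.lwjgl.LWJGLException;
import org.lwjgl.opengl.Display;
import org.lwjgl.opengl.DisplayMode;
import org.lwjgl.opengl.GL11;
import org.newdawn.slick.opengl.Texture;
import org.newdawn.slick.opengl.TextureLoader;
import org.newdawn.slick.util.ResourceLoader;

public class TexturedQuadDemo {
    public static void main(String[] args) throws LWJGLException, java.io.IOException {
        Display.setDisplayMode(new DisplayMode(640, 480));
        Display.create();
        GL11.glEnable(GL11.GL_TEXTURE_2D);
        GL11.glMatrixMode(GL11.GL_PROJECTION);
        GL11.glLoadIdentity();
        GL11.glOrtho(0, 640, 480, 0, -1, 1); // 2D pixel coordinates, origin top-left
        GL11.glMatrixMode(GL11.GL_MODELVIEW);
        // Slick-Util decodes the PNG and uploads it with glTexImage2D internally.
        Texture texture = TextureLoader.getTexture("PNG",
                ResourceLoader.getResourceAsStream("res/image.png"));
        while (!Display.isCloseRequested()) {
            GL11.glClear(GL11.GL_COLOR_BUFFER_BIT);
            texture.bind(); // equivalent to glBindTexture(GL_TEXTURE_2D, texture.getTextureID())
            GL11.glBegin(GL11.GL_QUADS);
            GL11.glTexCoord2f(0, 0); GL11.glVertex2f(100, 100);
            GL11.glTexCoord2f(1, 0); GL11.glVertex2f(356, 100);
            GL11.glTexCoord2f(1, 1); GL11.glVertex2f(356, 356);
            GL11.glTexCoord2f(0, 1); GL11.glVertex2f(100, 356);
            GL11.glEnd();
            Display.update();
        }
        Display.destroy();
    }
}
One Slick-Util quirk worth knowing: non-power-of-two images get padded up to the next power of two, so for those you'd use texture.getWidth()/texture.getHeight() (which return the used fraction of the padded texture, not pixel counts) as the upper texture coordinates instead of 1.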
I'm trying to get a spotlight working, but it's not showing up. Could someone have a look and see where I've gone wrong?
package water3;
import Common.TextureReader;
import com.sun.opengl.util.BufferUtil;
import java.io.IOException;
import javax.media.opengl.glu.GLUquadric;
import javax.media.opengl.GL;
import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.GLEventListener;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import javax.media.opengl.glu.GLU;
class Renderer implements GLEventListener {
private GLUquadric quadric;
private Object3D object3D;
private float[] LightPos = {0.0f, 5.0f, -4.0f, 1.0f}; // Light Position
private boolean LightUp, LightDown, LightLeft, LightRight, LightForward, LightBackward;
private int[] textures = new int[3]; // Storage For 3 Textures
double aNum = 1;
private boolean aDown = false;
private boolean up = false;
private GLU glu = new GLU();
public void init(GLAutoDrawable drawable) {
GL gl = drawable.getGL();
try {
loadGLTextures(drawable);
} catch (IOException e) {
System.out.println("Couldn't load model/Texture");
throw new RuntimeException(e);
}
// Earlier setup attempts, kept commented out:
// gl.glShadeModel(GL.GL_SMOOTH); // Enable Smooth Shading
// gl.glClearColor(0.0f, 0.0f, 0.0f, 0.5f); // Black Background
// gl.glClearDepth(1.0f); // Depth Buffer Setup
// gl.glClearStencil(0); // Stencil Buffer Setup
// gl.glEnable(GL.GL_DEPTH_TEST); // Enables Depth Testing
// gl.glDepthFunc(GL.GL_LEQUAL); // The Type Of Depth Testing To Do
// gl.glHint(GL.GL_PERSPECTIVE_CORRECTION_HINT, GL.GL_NICEST); // Really Nice Perspective Calculations
// gl.glLightfv(GL.GL_LIGHT1, GL.GL_POSITION, LightPos, 0); // Set Light1 Position
// gl.glLightfv(GL.GL_LIGHT1, GL.GL_AMBIENT, LightAmb, 0); // Set Light1 Ambience
// gl.glLightfv(GL.GL_LIGHT1, GL.GL_DIFFUSE, LightDif, 0); // Set Light1 Diffuse
// gl.glLightfv(GL.GL_LIGHT1, GL.GL_SPECULAR, LightSpc, 0); // Set Light1 Specular
// gl.glEnable(GL.GL_LIGHT1); // Enable Light1
// gl.glMaterialfv(GL.GL_FRONT, GL.GL_AMBIENT, MatAmb, 0); // Set Material Ambience
// gl.glMaterialfv(GL.GL_FRONT, GL.GL_DIFFUSE, MatDif, 0); // Set Material Diffuse
// gl.glMaterialfv(GL.GL_FRONT, GL.GL_SPECULAR, MatSpc, 0); // Set Material Specular
// gl.glMaterialfv(GL.GL_FRONT, GL.GL_SHININESS, MatShn, 0); // Set Material Shininess
gl.glClearColor(0,0,0,1);
gl.glEnable(GL.GL_LIGHT0);
gl.glEnable(GL.GL_DEPTH_TEST);
gl.glShadeModel(GL.GL_SMOOTH);
gl.glLightModeli(GL.GL_LIGHT_MODEL_TWO_SIDE, GL.GL_TRUE);
gl.glCullFace(GL.GL_BACK); // Set Culling Face To Back Face
gl.glEnable(GL.GL_CULL_FACE); // Enable Culling
gl.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // Set Clear Color (black; duplicates the call above)
quadric = glu.gluNewQuadric(); // Initialize Quadric
glu.gluQuadricNormals(quadric, GL.GL_SMOOTH); // Enable Smooth Normal Generation
glu.gluQuadricTexture(quadric, false);
}
public void display(GLAutoDrawable drawable) {
GL gl = drawable.getGL();
// Clear Color Buffer, Depth Buffer, Stencil Buffer
gl.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT | GL.GL_STENCIL_BUFFER_BIT);
gl.glLoadIdentity();
SetSpotlight(gl);
SetFloorMaterial(gl);
gl.glPushMatrix();
// Reset Modelview Matrix
gl.glTranslatef(0.0f, 0.0f, -20.0f); // Zoom Into The Screen 20 Units
gl.glEnable(GL.GL_TEXTURE_2D); // Enable Texture Mapping ( NEW )
drawRoom(gl); // Draw The Room
gl.glPopMatrix();
gl.glFlush(); // Flush The OpenGL Pipeline
}
private void drawRoom(GL gl) { // Draw The Room (Box)
gl.glBegin(GL.GL_QUADS); // Begin Drawing Quads
// Floor
gl.glNormal3f(0.0f, 1.0f, 0.0f); // Normal Pointing Up
gl.glVertex3f(-20.0f, -20.0f, -40.0f); // Back Left
gl.glVertex3f(-20.0f, -20.0f, 40.0f); // Front Left
gl.glVertex3f(20.0f, -20.0f, 40.0f); // Front Right
gl.glVertex3f(20.0f, -20.0f, -40.0f); // Back Right
// Ceiling
gl.glNormal3f(0.0f, -1.0f, 0.0f); // Normal Pointing Down
gl.glVertex3f(-10.0f, 10.0f, 20.0f); // Front Left
gl.glVertex3f(-10.0f, 10.0f, -20.0f); // Back Left
gl.glVertex3f(10.0f, 10.0f, -20.0f); // Back Right
gl.glVertex3f(10.0f, 10.0f, 20.0f); // Front Right
// Back Wall
gl.glNormal3f(0.0f, 0.0f, -1.0f); // Normal Pointing Away From Viewer
gl.glVertex3f(20.0f, 20.0f, 30.0f); // Top Right
gl.glVertex3f(20.0f, -20.0f, 30.0f); // Bottom Right
gl.glVertex3f(-20.0f, -20.0f, 30.0f); // Bottom Left
gl.glVertex3f(-20.0f, 20.0f, 30.0f); // Top Left
// Left Wall
gl.glNormal3f(1.0f, 0.0f, 0.0f); // Normal Pointing Right
gl.glVertex3f(-20.0f, 20.0f, 30.0f); // Top Front
gl.glVertex3f(-20.0f, -20.0f, 30.0f); // Bottom Front
gl.glVertex3f(-20.0f, -20.0f, -30.0f); // Bottom Back
gl.glVertex3f(-20.0f, 20.0f, -30.0f); // Top Back
// Right Wall
gl.glNormal3f(-1.0f, 0.0f, 0.0f); // Normal Pointing Left
gl.glVertex3f(20.0f, 20.0f, -30.0f); // Top Back
gl.glVertex3f(20.0f, -20.0f, -30.0f); // Bottom Back
gl.glVertex3f(20.0f, -20.0f, 30.0f); // Bottom Front
gl.glVertex3f(20.0f, 20.0f, 30.0f); // Top Front
// Front Wall (glPushMatrix()/glPopMatrix() removed here: matrix calls are illegal between glBegin() and glEnd())
gl.glNormal3f(0.0f, 0.0f, 1.0f); // Normal Pointing Towards Viewer
gl.glTexCoord2f(1,1);
gl.glVertex3f(-20.0f, 20.0f, -30.0f); // Top Left
gl.glTexCoord2f(1,0);
gl.glVertex3f(-20.0f, -20.0f, -30.0f); // Bottom Left
gl.glTexCoord2f(0,0);
gl.glVertex3f(20.0f, -20.0f, -30.0f); // Bottom Right
gl.glTexCoord2f(0,1);
gl.glVertex3f(20.0f, 20.0f, -30.0f); // Top Right
gl.glEnd(); // Done Drawing Quads
}
public void reshape(GLAutoDrawable drawable,int xstart,int ystart,int width,int height) {
GL gl = drawable.getGL();
height = (height == 0) ? 1 : height;
gl.glViewport(0, 0, width, height);
gl.glMatrixMode(GL.GL_PROJECTION);
gl.glLoadIdentity();
gl.glRotatef(90, 0.0f, 0.0f, 1.0f);
glu.gluPerspective(60, (float) width / height, 1, 1000);
glu.gluLookAt(1.0f,0.0f,25.0f,
0.0f,0.0f,0.0f,
0.0f,0.0f,1.0f);
gl.glMatrixMode(GL.GL_MODELVIEW);
gl.glLoadIdentity();
}
public void displayChanged(GLAutoDrawable drawable, boolean modeChanged,boolean deviceChanged) { }
private void loadGLTextures(GLAutoDrawable gldrawable) throws IOException {
TextureReader.Texture texture = null;
texture = TextureReader.readTexture("data/images/04.bmp");
GL gl = gldrawable.getGL();
//Create Nearest Filtered Texture
gl.glGenTextures(1, textures, 0);
gl.glBindTexture(GL.GL_TEXTURE_2D, textures[0]);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR);
gl.glTexImage2D(GL.GL_TEXTURE_2D,
0,
3,
texture.getWidth(),
texture.getHeight(),
0,
GL.GL_RGB,
GL.GL_UNSIGNED_BYTE,
texture.getPixels());
}
public void SetSpotlight(GL gl){
gl.glDisable(GL.GL_LIGHTING);
// prepare spotlight
float spot_ambient[] = {50.2f, 0.0f, 0.0f, 1.0f}; // deliberately overbright red; for white try {10.2f, 10.2f, 10.2f, 1.0f}
float spot_diffuse[] = {50.8f, 0.0f, 0.0f, 1.0f};
float spot_specular[] = {50.8f, 0.0f, 0.0f, 1.0f};
// set colors here and do the geometry in draw
gl.glLightfv(GL.GL_LIGHT0, GL.GL_AMBIENT, spot_ambient,0);
gl.glLightfv(GL.GL_LIGHT0, GL.GL_DIFFUSE, spot_diffuse,0);
gl.glLightfv(GL.GL_LIGHT0, GL.GL_SPECULAR, spot_specular,0);
gl.glEnable(GL.GL_LIGHTING);
gl.glEnable(GL.GL_LIGHT0);
// Set the light position each frame, after the modelview transform,
// so the spotlight follows the scene as it moves.
float spot_position[] = {0.0f,60.0f,-30.0f,1.0f};
float spot_direction[] = {0.0f,0.0f,-1.0f};
float spot_angle = 10.0f;
gl.glLightfv(GL.GL_LIGHT0, GL.GL_POSITION, spot_position,0);
gl.glLightfv(GL.GL_LIGHT0, GL.GL_SPOT_DIRECTION,spot_direction,0);
gl.glLightf(GL.GL_LIGHT0, GL.GL_SPOT_CUTOFF,(float)spot_angle);
// "smoothing" the border of the lightcone
// change this for effect
gl.glMaterialfv(GL.GL_FRONT, GL.GL_AMBIENT_AND_DIFFUSE, new float[] {0.7f,0.7f,1}, 0 );
gl.glLighti(GL.GL_LIGHT0, GL.GL_SPOT_EXPONENT, 20);
gl.glEnable(GL.GL_LIGHTING);
}
public void SetFloorMaterial(GL gl){
float amb[]={0.3f,0.3f,0.0f,1.0f};
float diff[]={1.0f,1.0f,0.5f,1.0f};
float spec[]={0.6f,0.6f,0.5f,1.0f};
float shine=0.25f;
gl.glMaterialfv(GL.GL_FRONT,GL.GL_AMBIENT,amb,0);
gl.glMaterialfv(GL.GL_FRONT,GL.GL_DIFFUSE,diff,0);
gl.glMaterialfv(GL.GL_FRONT,GL.GL_SPECULAR,spec,0);
gl.glMaterialf(GL.GL_FRONT,GL.GL_SHININESS,shine*128.0f);
}
}
OpenGL's fixed-function pipeline evaluates illumination only at vertices and then just interpolates the result across each triangle. Since the walls of your room are single very large quads, each wall has only 4 vertices, so only 4 lighting calculations happen per wall; a 10° spotlight cone that falls entirely between those corners never contributes anything visible. So you have to either refine (i.e. tessellate) the walls' meshes so there are enough vertices inside the cone, or use per-pixel lighting (i.e. do the lighting calculation at the fragment level, which means shaders). A tessellation sketch follows.
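Here is a minimal sketch of the tessellation route (my code, not the original poster's): a hypothetical drawTessellatedFloor() helper that could replace the single floor quad in drawRoom(). It draws the same floor as an n x n grid of small quads, so the fixed-function lighting is evaluated at many more points; with something like n = 40 the spotlight pool becomes visible.
// Hypothetical helper, not part of the original Renderer: draws the floor
// as an n x n grid of small quads so GL_LIGHT0 is evaluated at many vertices.
private void drawTessellatedFloor(GL gl, int n) {
    float x0 = -20.0f, x1 = 20.0f; // same extents as the original floor quad
    float z0 = -40.0f, z1 = 40.0f;
    float y = -20.0f;
    float dx = (x1 - x0) / n;
    float dz = (z1 - z0) / n;
    gl.glNormal3f(0.0f, 1.0f, 0.0f); // the floor is flat, so one normal serves every vertex
    gl.glBegin(GL.GL_QUADS);
    for (int i = 0; i < n; i++) {
        for (int j = 0; j < n; j++) {
            float x = x0 + i * dx;
            float z = z0 + j * dz;
            gl.glVertex3f(x, y, z); // same winding as the original floor
            gl.glVertex3f(x, y, z + dz);
            gl.glVertex3f(x + dx, y, z + dz);
            gl.glVertex3f(x + dx, y, z);
        }
    }
    gl.glEnd();
}
The same subdivision would apply to the walls and ceiling. The per-pixel alternative scales better since it needs no extra geometry, but it means moving the lighting math into a fragment shader instead of relying on glLightfv().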