Unable to render with array buffers in Java with LWJGL

I'm attempting to learn how to program in OpenGL the modern way, using vertex array/vertex buffer objects. I'm using the tutorials on the LWJGL wiki right now, and even when I copy and paste the tutorial code, I get a window with the background colour set properly but no shape rendered on top of it. The tutorial page shows a screenshot with a white rectangle rendered over the background. Is this a common issue, or is there any way I can get further information on my error?
Edit: using shaders and putting some colour on the vertices fixes the problem. I'm not posting this as an answer quite yet, though, because I'm assuming the tutorial code was intended to work without the use of shaders (which are covered in a later portion of the tutorial).
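For reference, a minimal shader pair along these lines is enough to make the quad appear; this is my own sketch, not the tutorial's code, and the names in_Position and out_Color are my choice. On a 3.2 core, forward-compatible context the fixed-function pipeline is unavailable, which is consistent with nothing being drawn when no program is bound.
// Minimal pass-through shaders, embedded as Java strings (sketch).
private static final String VERTEX_SHADER =
    "#version 150 core\n"
  + "in vec3 in_Position;\n"
  + "void main() {\n"
  + "    gl_Position = vec4(in_Position, 1.0);\n"
  + "}\n";
private static final String FRAGMENT_SHADER =
    "#version 150 core\n"
  + "out vec4 out_Color;\n"
  + "void main() {\n"
  + "    out_Color = vec4(1.0, 1.0, 1.0, 1.0);\n" // constant white, like the screenshot
  + "}\n";
Compile each with GL20.glCreateShader/glShaderSource/glCompileShader, call GL20.glBindAttribLocation(program, 0, "in_Position") before GL20.glLinkProgram, and bind the program with GL20.glUseProgram before glDrawArrays.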
This is the code on the tutorial page:
import java.nio.FloatBuffer;
import org.lwjgl.BufferUtils;
import org.lwjgl.LWJGLException;
import org.lwjgl.opengl.ContextAttribs;
import org.lwjgl.opengl.Display;
import org.lwjgl.opengl.DisplayMode;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL15;
import org.lwjgl.opengl.GL20;
import org.lwjgl.opengl.GL30;
import org.lwjgl.opengl.PixelFormat;
import org.lwjgl.util.glu.GLU;
public class TheQuadExampleDrawArrays {
// Entry point for the application
public static void main(String[] args) {
new TheQuadExampleDrawArrays();
}
// Setup variables
private final String WINDOW_TITLE = "The Quad: glDrawArrays";
private final int WIDTH = 320;
private final int HEIGHT = 240;
// Quad variables
private int vaoId = 0;
private int vboId = 0;
private int vertexCount = 0;
public TheQuadExampleDrawArrays() {
// Initialize OpenGL (Display)
this.setupOpenGL();
this.setupQuad();
while (!Display.isCloseRequested()) {
// Do a single loop (logic/render)
this.loopCycle();
// Force a maximum FPS of about 60
Display.sync(60);
// Let the CPU synchronize with the GPU if the GPU is lagging behind
Display.update();
}
// Destroy OpenGL (Display)
this.destroyOpenGL();
}
public void setupOpenGL() {
// Setup an OpenGL context with API version 3.2
try {
PixelFormat pixelFormat = new PixelFormat();
ContextAttribs contextAttributes = new ContextAttribs(3, 2)
.withForwardCompatible(true)
.withProfileCore(true);
Display.setDisplayMode(new DisplayMode(WIDTH, HEIGHT));
Display.setTitle(WINDOW_TITLE);
Display.create(pixelFormat, contextAttributes);
} catch (LWJGLException e) {
e.printStackTrace();
System.exit(-1);
}
// Set up an XNA-like background color
GL11.glClearColor(0.4f, 0.6f, 0.9f, 0f);
// Map the internal OpenGL coordinate system to the entire screen
GL11.glViewport(0, 0, WIDTH, HEIGHT);
this.exitOnGLError("Error in setupOpenGL");
}
public void setupQuad() {
// OpenGL expects vertices to be defined counter clockwise by default
float[] vertices = {
// Left bottom triangle
-0.5f, 0.5f, 0f,
-0.5f, -0.5f, 0f,
0.5f, -0.5f, 0f,
// Right top triangle
0.5f, -0.5f, 0f,
0.5f, 0.5f, 0f,
-0.5f, 0.5f, 0f
};
// Sending data to OpenGL requires the usage of (flipped) byte buffers
FloatBuffer verticesBuffer = BufferUtils.createFloatBuffer(vertices.length);
verticesBuffer.put(vertices);
verticesBuffer.flip();
vertexCount = 6;
// Create a new Vertex Array Object in memory and select it (bind)
// A VAO can have up to 16 attributes (VBOs) assigned to it by default
vaoId = GL30.glGenVertexArrays();
GL30.glBindVertexArray(vaoId);
// Create a new Vertex Buffer Object in memory and select it (bind)
// A VBO is a collection of vectors which in this case represent the location of each vertex.
vboId = GL15.glGenBuffers();
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vboId);
GL15.glBufferData(GL15.GL_ARRAY_BUFFER, verticesBuffer, GL15.GL_STATIC_DRAW);
// Put the VBO in the attributes list at index 0
GL20.glVertexAttribPointer(0, 3, GL11.GL_FLOAT, false, 0, 0);
// Deselect (bind to 0) the VBO
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
// Deselect (bind to 0) the VAO
GL30.glBindVertexArray(0);
this.exitOnGLError("Error in setupQuad");
}
public void loopCycle() {
GL11.glClear(GL11.GL_COLOR_BUFFER_BIT);
// Bind to the VAO that has all the information about the quad vertices
GL30.glBindVertexArray(vaoId);
GL20.glEnableVertexAttribArray(0);
// Draw the vertices
GL11.glDrawArrays(GL11.GL_TRIANGLES, 0, vertexCount);
// Put everything back to default (deselect)
GL20.glDisableVertexAttribArray(0);
GL30.glBindVertexArray(0);
this.exitOnGLError("Error in loopCycle");
}
public void destroyOpenGL() {
// Disable the VBO index from the VAO attributes list
GL20.glDisableVertexAttribArray(0);
// Delete the VBO
GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
GL15.glDeleteBuffers(vboId);
// Delete the VAO
GL30.glBindVertexArray(0);
GL30.glDeleteVertexArrays(vaoId);
Display.destroy();
}
public void exitOnGLError(String errorMessage) {
int errorValue = GL11.glGetError();
if (errorValue != GL11.GL_NO_ERROR) {
String errorString = GLU.gluErrorString(errorValue);
System.err.println("ERROR - " + errorMessage + ": " + errorString);
if (Display.isCreated()) Display.destroy();
System.exit(-1);
}
}
}

This is a late answer, but since the Z coordinate is 0 on all of your vertices, shouldn't the quad sit exactly on the near clipping plane and thus not render because it is out of range? Try moving the vertices back along the Z axis; theoretically it should then render.

Related

LWJGL Vertex data seems corrupted

I am trying to render a triangle to see how LWJGL works. Each frame, I reset the vertex data ByteBuffer and write 3 vertices to it directly. Then I call buffer.flip() to ready the data to be uploaded to the GPU and call glBufferData(...) and finally glDrawArrays(...), but no triangle shows. Using the debug program RenderDoc I was able to look at the vertex data that was supposedly uploaded and it definitely doesn't seem right.
As you can see, each position is extremely small (something like 41 zeros after the decimal point). I don't see any errors, even with the GLFW error callbacks and debug context set up.
All Java code:
import org.lwjgl.glfw.GLFW;
import org.lwjgl.glfw.GLFWErrorCallback;
import org.lwjgl.opengl.GL;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL30;
import org.lwjgl.opengl.GLUtil;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
public class MinimalExample {
private static void debugPrintErrors() {
System.out.println("-> DEBUG PRINT ERRORS");
int error;
while ((error = GL30.glGetError()) != GL30.GL_NO_ERROR) {
StringBuilder b = new StringBuilder(" ");
switch (error) {
case GL30.GL_INVALID_ENUM -> b.append("INVALID_ENUM");
case GL30.GL_INVALID_VALUE -> b.append("INVALID_VALUE");
case GL30.GL_INVALID_OPERATION -> b.append("INVALID_OP");
case GL30.GL_INVALID_FRAMEBUFFER_OPERATION -> b.append("INVALID_FB_OP");
}
System.out.println(b);
}
}
private static String readResource(String res) {
try {
InputStream is = MinimalExample.class.getResourceAsStream(res);
String s = new String(is.readAllBytes(), StandardCharsets.UTF_8);
is.close();
return s;
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
// vertex data buffer
private static final ByteBuffer buf = ByteBuffer.allocateDirect(4096);
// shader program
static int program;
// render objects
static int vao;
static int vbo;
public static void main(String[] args) {
// set buffer limit
buf.limit(4096).position(0);
// init glfw and create window
GLFW.glfwInit();
long window = GLFW.glfwCreateWindow(500, 500, "Hello", 0, 0);
// create GL
GLFW.glfwMakeContextCurrent(window);
GL.createCapabilities();
GLUtil.setupDebugMessageCallback(System.out);
GLFW.glfwSetErrorCallback(GLFWErrorCallback.createPrint(System.out));
// create vertex objects
vao = GL30.glGenVertexArrays();
vbo = GL30.glGenBuffers();
GL30.glBindVertexArray(vao);
GL30.glBindBuffer(GL30.GL_ARRAY_BUFFER, vbo);
GL30.glVertexAttribPointer(0, 3, GL30.GL_FLOAT, false, 7 * 4, 0);
GL30.glVertexAttribPointer(1, 4, GL30.GL_FLOAT, false, 7 * 4, 7 * 3);
GL30.glEnableVertexAttribArray(0);
GL30.glEnableVertexAttribArray(1);
// compile and link shaders
int vertexShader = GL30.glCreateShader(GL30.GL_VERTEX_SHADER);
int fragmentShader = GL30.glCreateShader(GL30.GL_FRAGMENT_SHADER);
GL30.glShaderSource(vertexShader, readResource("/test.vsh"));
GL30.glShaderSource(fragmentShader, readResource("/test.fsh"));
GL30.glCompileShader(vertexShader);
GL30.glCompileShader(fragmentShader);
program = GL30.glCreateProgram();
GL30.glAttachShader(program, vertexShader);
GL30.glAttachShader(program, fragmentShader);
GL30.glLinkProgram(program);
// render loop
while (!GLFW.glfwWindowShouldClose(window)) {
// poll events
GLFW.glfwPollEvents();
// clear screen
GL30.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
GL30.glClear(GL30.GL_COLOR_BUFFER_BIT);
// render
render();
// swap buffers
GLFW.glfwSwapBuffers(window);
}
}
static void render() {
// put vertex data
// manual to simulate graphics library
putVec3(0.25f, 0.25f, 1f); putVec4(1.0f, 0.0f, 0.0f, 1.0f);
putVec3(0.75f, 0.25f, 1f); putVec4(0.0f, 1.0f, 0.0f, 1.0f);
putVec3(0.50f, 0.75f, 1f); putVec4(0.0f, 0.0f, 1.0f, 1.0f);
buf.flip();
// bind program
GL30.glUseProgram(program);
// bind vertex array
GL30.glBindVertexArray(vao);
GL30.glEnableVertexAttribArray(0);
GL30.glEnableVertexAttribArray(1);
// upload graphics data and draw
GL30.glBindBuffer(GL30.GL_ARRAY_BUFFER, vbo);
GL30.glBufferData(GL30.GL_ARRAY_BUFFER, buf, GL30.GL_STATIC_DRAW);
GL30.glDrawArrays(GL30.GL_TRIANGLES, 0, 3);
// reset vertex data buffer
buf.position(0);
buf.limit(buf.capacity());
}
//////////////////////////////////////////
static void putVec3(float x, float y, float z) {
buf.putFloat(x);
buf.putFloat(y);
buf.putFloat(z);
}
static void putVec4(float x, float y, float z, float w) {
buf.putFloat(x);
buf.putFloat(y);
buf.putFloat(z);
buf.putFloat(w);
}
}
All shader code (merged into one block for convenience; in reality it is two files):
/*
test.vsh
*/
#version 330 core
in layout(location = 0) vec3 position;
in layout(location = 1) vec4 col;
out layout(location = 0) vec4 fColor;
void main() {
gl_Position = vec4(position * 1000, 1);
fColor = col;
}
/*
test.fsh
*/
#version 330 core
in layout(location = 0) vec4 fColor;
out vec4 outColor;
void main() {
outColor = fColor;
}
Edit: I know the contents of the ByteBuffer are correct; checking them each frame yields:
[ 0.25, 0.25, 1.0, 1.0, 0.0, 0.0, 1.0, 0.75, 0.25, 1.0, 0.0, 1.0, 0.0, 1.0, 0.5, 0.75, 1.0, 0.0, 0.0, 1.0, 1.0, ]
There are 3 problems.
The first problem is buf. ByteBuffer::allocateDirect creates a buffer in BIG_ENDIAN byte order by default, but OpenGL is a native library and expects data in the platform's native byte order (LITTLE_ENDIAN on most platforms). So the buffer must be allocated with BufferUtils::createByteBuffer or MemoryUtil::memAlloc, which return direct buffers in native order.
The second problem is the offset in the second glVertexAttribPointer call. It should be 4 * 3 (three 4-byte floats), not 7 * 3.
The third problem is your vertex shader. The input position is multiplied by 1000, which pushes the vertices far outside the clip volume. To avoid this, just remove the multiplication or use a projection matrix.
gl_Position = vec4(position /* * 1000 */, 1);
BTW, I recommend using ByteBuffer::clear to reset the vertex data buffer, because it is a built-in method.
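For illustration, either of these allocations yields a direct buffer in native byte order (a small sketch; both are standard LWJGL/NIO calls):
// LWJGL helper: direct ByteBuffer, already in native byte order
ByteBuffer buf = BufferUtils.createByteBuffer(4096);
// Plain NIO: allocateDirect defaults to BIG_ENDIAN, so set the order explicitly
ByteBuffer buf2 = ByteBuffer.allocateDirect(4096).order(ByteOrder.nativeOrder());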
Fixed Java code:
import org.lwjgl.BufferUtils;
import org.lwjgl.glfw.GLFW;
import org.lwjgl.glfw.GLFWErrorCallback;
import org.lwjgl.opengl.GL;
import org.lwjgl.opengl.GL30;
import org.lwjgl.opengl.GLUtil;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
public class MinimalExample {
private static void debugPrintErrors() {
System.out.println("-> DEBUG PRINT ERRORS");
int error;
while ((error = GL30.glGetError()) != GL30.GL_NO_ERROR) {
StringBuilder b = new StringBuilder(" ");
switch (error) {
case GL30.GL_INVALID_ENUM -> b.append("INVALID_ENUM");
case GL30.GL_INVALID_VALUE -> b.append("INVALID_VALUE");
case GL30.GL_INVALID_OPERATION -> b.append("INVALID_OP");
case GL30.GL_INVALID_FRAMEBUFFER_OPERATION -> b.append("INVALID_FB_OP");
}
System.out.println(b);
}
}
private static String readResource(String res) {
try {
InputStream is = MinimalExample.class.getResourceAsStream(res);
String s = new String(is.readAllBytes(), StandardCharsets.UTF_8);
is.close();
return s;
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
// vertex data buffer
private static final ByteBuffer buf = BufferUtils.createByteBuffer(4096);
// shader program
static int program;
// render objects
static int vao;
static int vbo;
public static void main(String[] args) {
// set buffer limit
buf.limit(4096).position(0);
// init glfw and create window
GLFW.glfwInit();
long window = GLFW.glfwCreateWindow(500, 500, "Hello", 0, 0);
// create GL
GLFW.glfwMakeContextCurrent(window);
GL.createCapabilities();
GLUtil.setupDebugMessageCallback(System.out);
GLFW.glfwSetErrorCallback(GLFWErrorCallback.createPrint(System.out));
// create vertex objects
vao = GL30.glGenVertexArrays();
vbo = GL30.glGenBuffers();
GL30.glBindVertexArray(vao);
GL30.glBindBuffer(GL30.GL_ARRAY_BUFFER, vbo);
GL30.glVertexAttribPointer(0, 3, GL30.GL_FLOAT, false, 7 * 4, 0);
GL30.glVertexAttribPointer(1, 4, GL30.GL_FLOAT, false, 7 * 4, 4 * 3);
GL30.glEnableVertexAttribArray(0);
GL30.glEnableVertexAttribArray(1);
// compile and link shaders
int vertexShader = GL30.glCreateShader(GL30.GL_VERTEX_SHADER);
int fragmentShader = GL30.glCreateShader(GL30.GL_FRAGMENT_SHADER);
GL30.glShaderSource(vertexShader, readResource("/test.vsh"));
GL30.glShaderSource(fragmentShader, readResource("/test.fsh"));
GL30.glCompileShader(vertexShader);
GL30.glCompileShader(fragmentShader);
program = GL30.glCreateProgram();
GL30.glAttachShader(program, vertexShader);
GL30.glAttachShader(program, fragmentShader);
GL30.glLinkProgram(program);
// render loop
while (!GLFW.glfwWindowShouldClose(window)) {
// poll events
GLFW.glfwPollEvents();
// clear screen
GL30.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
GL30.glClear(GL30.GL_COLOR_BUFFER_BIT);
// render
render();
// swap buffers
GLFW.glfwSwapBuffers(window);
}
}
static void render() {
// put vertex data
// manual to simulate graphics library
putVec3(0.25f, 0.25f, 1f); putVec4(1.0f, 0.0f, 0.0f, 1.0f);
putVec3(0.75f, 0.25f, 1f); putVec4(0.0f, 1.0f, 0.0f, 1.0f);
putVec3(0.50f, 0.75f, 1f); putVec4(0.0f, 0.0f, 1.0f, 1.0f);
buf.flip();
// bind program
GL30.glUseProgram(program);
// bind vertex array
GL30.glBindVertexArray(vao);
GL30.glEnableVertexAttribArray(0);
GL30.glEnableVertexAttribArray(1);
// upload graphics data and draw
GL30.glBindBuffer(GL30.GL_ARRAY_BUFFER, vbo);
GL30.glBufferData(GL30.GL_ARRAY_BUFFER, buf, GL30.GL_STATIC_DRAW);
GL30.glDrawArrays(GL30.GL_TRIANGLES, 0, 3);
// reset vertex data buffer
buf.clear();
}
//////////////////////////////////////////
static void putVec3(float x, float y, float z) {
buf.putFloat(x);
buf.putFloat(y);
buf.putFloat(z);
}
static void putVec4(float x, float y, float z, float w) {
buf.putFloat(x);
buf.putFloat(y);
buf.putFloat(z);
buf.putFloat(w);
}
}

OpenGL first steps with light and normals

I'm playing around with some basic OpenGL stuff and I'm trying to set up a simple square with lighting enabled, but the lighting is not correct, so I guess there is something wrong with my normals.
Or is my understanding of normals totally wrong?
Here's my rendering code (by the way, I'm using LWJGL):
public class Renderer {
DisplayMode displayMode;
int i;
int width;
int height;
private boolean drawAxes = false;
private float rotation = 40.0f;
private float zoom = -20f;
// ----------- Variables added for Lighting Test -----------//
private FloatBuffer matSpecular;
private FloatBuffer lightPosition;
private FloatBuffer whiteLight;
private FloatBuffer lModelAmbient;
public Renderer(int width, int height) {
this.width = width;
this.height = height;
}
public static Renderer start() throws LWJGLException {
Renderer r = new Renderer(800, 600);
r.initContext();
r.run();
return r;
}
private void initContext() throws LWJGLException {
Display.setFullscreen(false);
DisplayMode d[] = Display.getAvailableDisplayModes();
for (int i = 0; i < d.length; i++) {
if (d[i].getWidth() == width && d[i].getHeight() == height && d[i].getBitsPerPixel() == 32) {
displayMode = d[i];
break;
}
}
Display.setDisplayMode(displayMode);
Display.create();
}
private void run() {
initGL();
while (!Display.isCloseRequested()) {
preRender();
render();
Display.update();
Display.sync(60);
}
Display.destroy();
}
private void initGL() {
GL11.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); // Black Background
GL11.glClearDepth(1.0); // Depth Buffer Setup
GL11.glEnable(GL11.GL_DEPTH_TEST); // Enables Depth Testing
GL11.glDepthFunc(GL11.GL_LEQUAL); // The Type Of Depth Testing To Do
GL11.glMatrixMode(GL11.GL_PROJECTION); // Select The Projection Matrix
GL11.glLoadIdentity(); // Reset The Projection Matrix
// Calculate The Aspect Ratio Of The Window
GLU.gluPerspective(45.0f, (float) displayMode.getWidth() / (float) displayMode.getHeight(), 0.1f, 100.0f);
GL11.glMatrixMode(GL11.GL_MODELVIEW); // Select The Modelview Matrix
// Really Nice Perspective Calculations
GL11.glHint(GL11.GL_PERSPECTIVE_CORRECTION_HINT, GL11.GL_NICEST);
GL11.glPolygonMode(GL11.GL_FRONT_AND_BACK, GL11.GL_FILL);
initLightArrays();
glShadeModel(GL_SMOOTH);
glMaterial(GL_FRONT, GL_SPECULAR, matSpecular); // sets specular material color
glMaterialf(GL_FRONT, GL_SHININESS, 100.0f); // sets shininess
glLight(GL_LIGHT0, GL_POSITION, lightPosition); // sets light position
glLight(GL_LIGHT0, GL_SPECULAR, whiteLight); // sets specular light to white
glLight(GL_LIGHT0, GL_DIFFUSE, whiteLight); // sets diffuse light to white
glLightModel(GL_LIGHT_MODEL_AMBIENT, lModelAmbient); // global ambient light
glEnable(GL_LIGHTING); // enables lighting
glEnable(GL_LIGHT0); // enables light0
glEnable(GL_COLOR_MATERIAL); // enables opengl to use glColor3f to define material color
glColorMaterial(GL_FRONT, GL_AMBIENT_AND_DIFFUSE); // tell opengl glColor3f effects the ambient and diffuse properties of material
}
private void preRender() {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
GL11.glTranslatef(0f, 0f, zoom);
GL11.glRotatef(-60f, 1f, 0f, 0f);
GL11.glRotatef(rotation, 0f, 0f, 1f);
}
private void render() {
FloatBuffer cBuffer = BufferUtils.createFloatBuffer(6*3);
float[] cArray = { 1f,1f,1f,
1f,1f,1f,
1f,1f,1f,
1f,1f,1f,
1f,1f,1f,
1f,1f,1f};
cBuffer.put(cArray);
cBuffer.flip();
FloatBuffer vBuffer = BufferUtils.createFloatBuffer(6*3);
float[] vArray = { 1f,1f,0f,
-1f,-1f,0,
1f,-1f,0,
1f,1f,0f,
-1f,1f,0,
-1f,-1f,0};
vBuffer.put(vArray);
vBuffer.flip();
FloatBuffer nBuffer = BufferUtils.createFloatBuffer(6*3);
float[] nArray = { 0f,0f,1f,
0f,0f,1f,
0f,0f,1f,
0f,0f,1f,
0f,0f,1f,
0f,0f,1f};
nBuffer.put(nArray);
nBuffer.flip();
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_COLOR_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
glColorPointer(3, 0, cBuffer);
glVertexPointer(3, 0, vBuffer);
glNormalPointer(3, nBuffer);
glDrawArrays(GL_TRIANGLES, 0, 6);
glDisableClientState(GL_COLOR_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
if (drawAxes) {
drawAxes(6);
}
glTranslatef(0.0f, 0.0f, 3);
glColor3f(0.1f, 0.4f, 0.9f);
}
public static void main(String[] args) throws LWJGLException {
System.setProperty("org.lwjgl.opengl.Display.allowSoftwareOpenGL", "true");
Renderer.start();
}
You are setting your normal pointer wrong:
glColorPointer(3, 0, cBuffer);
glVertexPointer(3, 0, vBuffer);
glNormalPointer(3, nBuffer);
The fixed-function GL always expects normals to be 3-dimensional vectors, hence the size parameter (which tells the GL how many values there are in each vector) is not present in glNormalPointer. The 3 you are setting here is the stride parameter, which specifies the byte offset between consecutive array elements. A stride of 3 does not make any sense: it tells the GL that the second normal begins 3 bytes into the array, which means that when it reads the second normal's x component it combines the last byte of your first normal's x component with 3 bytes of your first normal's y component, and so on...
Since your array is tightly packed, you can use the shortcut 0 here, like you do with the other pointers.
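With LWJGL's FloatBuffer overloads, the corrected calls would presumably read:
glColorPointer(3, 0, cBuffer);
glVertexPointer(3, 0, vBuffer);
glNormalPointer(0, nBuffer); // no size parameter (normals are always 3 components); stride 0 = tightly packed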
However, you must be aware that all of this has been deprecated for almost a decade in OpenGL; modern core versions of OpenGL do not support the fixed-function pipeline at all. If you are learning OpenGL nowadays, I strongly recommend learning modern, shader-based GL instead.
Without seeing more of your code, it's very difficult to see exactly what's going wrong.
However, I do see one thing that could be a problem:
FloatBuffer vBuffer = BufferUtils.createFloatBuffer(6*3);
float[] vArray = { 1f,1f,0f,
1f,-1f,0,
-1f,-1f,0,
1f,1f,0f,
-1f,1f,0,
-1f,-1f,0};
vBuffer.put(vArray);
vBuffer.flip();
The winding order of your triangles is not the same. The first triangle winds clockwise, whereas the second triangle winds counter-clockwise. You'll need to reorder the vertices so that they wind in the same direction. OpenGL usually prefers counter-clockwise winding, so if I were you, I'd flip the first triangle.
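For instance, swapping the second and third vertices of the first triangle reverses its winding (a sketch based on the array quoted above):
float[] vArray = {
    // first triangle, flipped to wind counter-clockwise
    1f,  1f, 0f,
   -1f, -1f, 0f,
    1f, -1f, 0f,
    // second triangle, already counter-clockwise
    1f,  1f, 0f,
   -1f,  1f, 0f,
   -1f, -1f, 0f };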
If you're still getting the problem after you've done this, then post the rest of your draw code, as what you're showing here doesn't give a lot of information.

Android getOrientation() method returns bad results

I'm creating a 3D compass application.
I'm using the getOrientation method to get the orientation (almost the same implementation as here). If I place the phone on a table it works well, but when the top of the phone points to the sky (minus Z axis on the picture; the sphere is the Earth), getOrientation starts giving really bad results: it gives values for the Z axis that swing between 0 and 180 degrees within a few real degrees of movement. Is there any way to suppress this behavior? I created a little video that describes the problem (sorry for the bad quality). Thanks in advance.
Solution:
When rotating the model, the order of the rotations matters; there is a difference between:
gl.glRotatef(_angleY, 0f, 1f, 0f); //ROLL
gl.glRotatef(_angleX, 1f, 0f, 0f); //ELEVATION
gl.glRotatef(_angleZ, 0f, 0f, 1f); //AZIMUTH
gl.glRotatef(_angleX, 1f, 0f, 0f); //ELEVATION
gl.glRotatef(_angleY, 0f, 1f, 0f); //ROLL
gl.glRotatef(_angleZ, 0f, 0f, 1f); //AZIMUTH
Well, I can see at least 1 problem with this approach of yours.
I assume that you combine the 3D vector from your magnetometer with an averaging low-pass filter to smooth the data. Although such an approach would work great for a sensor value that varies without discontinuities, such as raw data from the accelerometer, it doesn't work so great verbatim with the angular variables fetched from your magnetometer. Why, one might ask?
Because those angular variables (azimuth, pitch, roll) have an upper bound and a lower bound: any value above 180 degrees, say 181 degrees, wraps around to 181-360 = -179 degrees, and any value below -180 degrees wraps around in the other direction. So when one of those angular variables gets close to those thresholds (180 or -180), it will tend to oscillate between values close to those two extremes. When you blindly apply a low-pass filter to those values, you get either a smooth decrease from 180 degrees towards -180 degrees, or a smooth increase from -180 towards 180 degrees. Either way, the result looks quite like your video above... As long as one directly applies an averaging buffer to the raw angle data from getOrientation(...), this problem will be present (and not only when the phone is upright, but also whenever the azimuth angle wraps around... maybe you could test for those bugs as well...).
You say that you tested this with a buffer size of 1. Theoretically, the problem should not be present if there is no averaging at all, although in some implementations of a circular buffer I've seen in the past, a size of 1 can still mean averaging with at least one past value rather than no averaging at all. If this is your case, we have found the root cause of your bug.
Unfortunately, there isn't much of an elegant solution that can be implemented while sticking with your standard averaging filter. What I usually do in this case is switch to another type of low-pass filter, which doesn't need any deep buffer to operate: a simple first-order IIR filter:
diff = x[n] - y[n-1]
y[n] - y[n-1] = alpha * (x[n] - y[n-1]) = alpha * diff
...where y is the filtered angle, x is the raw angle, and alpha<1 is analogous to a time constant: alpha=1 corresponds to the no-filter case, and the frequency cutoff of the low-pass filter gets lower as alpha approaches zero. An acute eye will probably have noticed by now that this corresponds to a simple proportional controller.
Such a filter allows the compensation of the wraparound of the angle value because we can add or subtract 360 to diff so as to ensure that abs(diff)<=180, which in turn ensures that the filtered angle value will always increase/decrease in the optimal direction to reach its "setpoint".
An example function call, which is to be scheduled periodically, that calculates a filtered angle value y for a given raw angle value x, could be something like this:
private float restrictAngle(float tmpAngle){
while(tmpAngle>=180) tmpAngle-=360;
while(tmpAngle<-180) tmpAngle+=360;
return tmpAngle;
}
//x is a raw angle value from getOrientation(...)
//y is the current filtered angle value
private float calculateFilteredAngle(float x, float y){
final float alpha = 0.1f;
float diff = x-y;
//here, we ensure that abs(diff)<=180
diff = restrictAngle(diff);
y += alpha*diff;
//ensure that y stays within [-180, 180[ bounds
y = restrictAngle(y);
return y;
}
The function calculateFilteredAngle(float x, float y) can then be called periodically, using something like this (example for the azimuth angle from the getOrientation(...) function):
filteredAzimuth = calculateFilteredAngle(azimuth, filteredAzimuth);
Using this method, the filter does not misbehave like the averaging filter mentioned by the OP.
As I could not load the .apk uploaded by the OP, I decided to implement my own test project in order to see if the corrections work. Here is the entire code (it does not use an XML file for the main layout, so I did not include one). Simply copy it into a test project to see if it works on a specific device (tested functional on an HTC Desire with Android 2.1):
File 1: Compass3DActivity.java:
package com.epichorns.compass3D;
import android.app.Activity;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Bundle;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;
public class Compass3DActivity extends Activity {
//Textviews for showing angle data
TextView mTextView_azimuth;
TextView mTextView_pitch;
TextView mTextView_roll;
TextView mTextView_filtered_azimuth;
TextView mTextView_filtered_pitch;
TextView mTextView_filtered_roll;
float mAngle0_azimuth=0;
float mAngle1_pitch=0;
float mAngle2_roll=0;
float mAngle0_filtered_azimuth=0;
float mAngle1_filtered_pitch=0;
float mAngle2_filtered_roll=0;
private Compass3DView mCompassView;
private SensorManager sensorManager;
//sensor calculation values
float[] mGravity = null;
float[] mGeomagnetic = null;
float Rmat[] = new float[9];
float Imat[] = new float[9];
float orientation[] = new float[3];
SensorEventListener mAccelerometerListener = new SensorEventListener(){
public void onAccuracyChanged(Sensor sensor, int accuracy) {}
public void onSensorChanged(SensorEvent event) {
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER){
mGravity = event.values.clone();
processSensorData();
}
}
};
SensorEventListener mMagnetometerListener = new SensorEventListener(){
public void onAccuracyChanged(Sensor sensor, int accuracy) {}
public void onSensorChanged(SensorEvent event) {
if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD){
mGeomagnetic = event.values.clone();
processSensorData();
update();
}
}
};
private float restrictAngle(float tmpAngle){
while(tmpAngle>=180) tmpAngle-=360;
while(tmpAngle<-180) tmpAngle+=360;
return tmpAngle;
}
//x is a raw angle value from getOrientation(...)
//y is the current filtered angle value
private float calculateFilteredAngle(float x, float y){
final float alpha = 0.3f;
float diff = x-y;
//here, we ensure that abs(diff)<=180
diff = restrictAngle(diff);
y += alpha*diff;
//ensure that y stays within [-180, 180[ bounds
y = restrictAngle(y);
return y;
}
public void processSensorData(){
if (mGravity != null && mGeomagnetic != null) {
boolean success = SensorManager.getRotationMatrix(Rmat, Imat, mGravity, mGeomagnetic);
if (success) {
SensorManager.getOrientation(Rmat, orientation);
mAngle0_azimuth = (float)Math.toDegrees((double)orientation[0]); // orientation contains: azimuth, pitch and roll
mAngle1_pitch = (float)Math.toDegrees((double)orientation[1]); //pitch
mAngle2_roll = -(float)Math.toDegrees((double)orientation[2]); //roll
mAngle0_filtered_azimuth = calculateFilteredAngle(mAngle0_azimuth, mAngle0_filtered_azimuth);
mAngle1_filtered_pitch = calculateFilteredAngle(mAngle1_pitch, mAngle1_filtered_pitch);
mAngle2_filtered_roll = calculateFilteredAngle(mAngle2_roll, mAngle2_filtered_roll);
}
mGravity=null; //oblige full new refresh
mGeomagnetic=null; //oblige full new refresh
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
LinearLayout ll = new LinearLayout(this);
LinearLayout.LayoutParams llParams = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.FILL_PARENT, LinearLayout.LayoutParams.FILL_PARENT);
ll.setLayoutParams(llParams);
ll.setOrientation(LinearLayout.VERTICAL);
ViewGroup.LayoutParams txtParams = new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
mTextView_azimuth = new TextView(this);
mTextView_azimuth.setLayoutParams(txtParams);
mTextView_pitch = new TextView(this);
mTextView_pitch.setLayoutParams(txtParams);
mTextView_roll = new TextView(this);
mTextView_roll.setLayoutParams(txtParams);
mTextView_filtered_azimuth = new TextView(this);
mTextView_filtered_azimuth.setLayoutParams(txtParams);
mTextView_filtered_pitch = new TextView(this);
mTextView_filtered_pitch.setLayoutParams(txtParams);
mTextView_filtered_roll = new TextView(this);
mTextView_filtered_roll.setLayoutParams(txtParams);
mCompassView = new Compass3DView(this);
ViewGroup.LayoutParams compassParams = new ViewGroup.LayoutParams(200,200);
mCompassView.setLayoutParams(compassParams);
ll.addView(mCompassView);
ll.addView(mTextView_azimuth);
ll.addView(mTextView_pitch);
ll.addView(mTextView_roll);
ll.addView(mTextView_filtered_azimuth);
ll.addView(mTextView_filtered_pitch);
ll.addView(mTextView_filtered_roll);
setContentView(ll);
sensorManager = (SensorManager) this.getSystemService(Context.SENSOR_SERVICE);
sensorManager.registerListener(mAccelerometerListener, sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), SensorManager.SENSOR_DELAY_UI);
sensorManager.registerListener(mMagnetometerListener, sensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD), SensorManager.SENSOR_DELAY_UI);
update();
}
@Override
public void onDestroy(){
super.onDestroy();
sensorManager.unregisterListener(mAccelerometerListener);
sensorManager.unregisterListener(mMagnetometerListener);
}
private void update(){
mCompassView.changeAngles(mAngle1_filtered_pitch, mAngle2_filtered_roll, mAngle0_filtered_azimuth);
mTextView_azimuth.setText("Azimuth: "+String.valueOf(mAngle0_azimuth));
mTextView_pitch.setText("Pitch: "+String.valueOf(mAngle1_pitch));
mTextView_roll.setText("Roll: "+String.valueOf(mAngle2_roll));
mTextView_filtered_azimuth.setText("Azimuth: "+String.valueOf(mAngle0_filtered_azimuth));
mTextView_filtered_pitch.setText("Pitch: "+String.valueOf(mAngle1_filtered_pitch));
mTextView_filtered_roll.setText("Roll: "+String.valueOf(mAngle2_filtered_roll));
}
}
File 2: Compass3DView.java:
package com.epichorns.compass3D;
import android.content.Context;
import android.opengl.GLSurfaceView;
public class Compass3DView extends GLSurfaceView {
private Compass3DRenderer mRenderer;
public Compass3DView(Context context) {
super(context);
mRenderer = new Compass3DRenderer(context);
setRenderer(mRenderer);
}
public void changeAngles(float angle0, float angle1, float angle2){
mRenderer.setAngleX(angle0);
mRenderer.setAngleY(angle1);
mRenderer.setAngleZ(angle2);
}
}
File 3: Compass3DRenderer.java:
package com.epichorns.compass3D;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.opengl.GLSurfaceView;
public class Compass3DRenderer implements GLSurfaceView.Renderer {
Context mContext;
// a raw buffer to hold indices
ShortBuffer _indexBuffer;
// raw buffers to hold the vertices
FloatBuffer _vertexBuffer0;
FloatBuffer _vertexBuffer1;
FloatBuffer _vertexBuffer2;
FloatBuffer _vertexBuffer3;
FloatBuffer _vertexBuffer4;
FloatBuffer _vertexBuffer5;
int _numVertices = 3; //standard triangle vertices = 3
FloatBuffer _textureBuffer0123;
//private FloatBuffer _light0Position;
//private FloatBuffer _light0Ambient;
float _light0Position[] = new float[]{10.0f, 10.0f, 10.0f, 0.0f};
float _light0Ambient[] = new float[]{0.05f, 0.05f, 0.05f, 1.0f};
float _light0Diffuse[] = new float[]{0.5f, 0.5f, 0.5f, 1.0f};
float _light0Specular[] = new float[]{0.7f, 0.7f, 0.7f, 1.0f};
float _matAmbient[] = new float[] { 0.6f, 0.6f, 0.6f, 1.0f };
float _matDiffuse[] = new float[] { 0.6f, 0.6f, 0.6f, 1.0f };
private float _angleX=0f;
private float _angleY=0f;
private float _angleZ=0f;
Compass3DRenderer(Context context){
super();
mContext = context;
}
public void setAngleX(float angle) {
_angleX = angle;
}
public void setAngleY(float angle) {
_angleY = angle;
}
public void setAngleZ(float angle) {
_angleZ = angle;
}
FloatBuffer InitFloatBuffer(float[] src){
ByteBuffer bb = ByteBuffer.allocateDirect(4*src.length);
bb.order(ByteOrder.nativeOrder());
FloatBuffer inBuf = bb.asFloatBuffer();
inBuf.put(src);
return inBuf;
}
ShortBuffer InitShortBuffer(short[] src){
ByteBuffer bb = ByteBuffer.allocateDirect(2*src.length);
bb.order(ByteOrder.nativeOrder());
ShortBuffer inBuf = bb.asShortBuffer();
inBuf.put(src);
return inBuf;
}
//Init data for our rendered pyramid
private void initTriangles() {
//Side faces triangles
float[] coords = {
-0.25f, -0.5f, 0.25f,
0.25f, -0.5f, 0.25f,
0f, 0.5f, 0f
};
float[] coords1 = {
0.25f, -0.5f, 0.25f,
0.25f, -0.5f, -0.25f,
0f, 0.5f, 0f
};
float[] coords2 = {
0.25f, -0.5f, -0.25f,
-0.25f, -0.5f, -0.25f,
0f, 0.5f, 0f
};
float[] coords3 = {
-0.25f, -0.5f, -0.25f,
-0.25f, -0.5f, 0.25f,
0f, 0.5f, 0f
};
//Base triangles
float[] coords4 = {
-0.25f, -0.5f, 0.25f,
0.25f, -0.5f, -0.25f,
0.25f, -0.5f, 0.25f
};
float[] coords5 = {
-0.25f, -0.5f, 0.25f,
-0.25f, -0.5f, -0.25f,
0.25f, -0.5f, -0.25f
};
float[] textures0123 = {
// Mapping coordinates for the vertices (UV mapping CW)
0.0f, 0.0f, // bottom left
1.0f, 0.0f, // bottom right
0.5f, 1.0f, // top ctr
};
_vertexBuffer0 = InitFloatBuffer(coords);
_vertexBuffer0.position(0);
_vertexBuffer1 = InitFloatBuffer(coords1);
_vertexBuffer1.position(0);
_vertexBuffer2 = InitFloatBuffer(coords2);
_vertexBuffer2.position(0);
_vertexBuffer3 = InitFloatBuffer(coords3);
_vertexBuffer3.position(0);
_vertexBuffer4 = InitFloatBuffer(coords4);
_vertexBuffer4.position(0);
_vertexBuffer5 = InitFloatBuffer(coords5);
_vertexBuffer5.position(0);
_textureBuffer0123 = InitFloatBuffer(textures0123);
_textureBuffer0123.position(0);
short[] indices = {0, 1, 2};
_indexBuffer = InitShortBuffer(indices);
_indexBuffer.position(0);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
gl.glEnable(GL10.GL_CULL_FACE); // enable the differentiation of which side may be visible
gl.glShadeModel(GL10.GL_SMOOTH);
gl.glFrontFace(GL10.GL_CCW); // which is the front? the one which is drawn counter clockwise
gl.glCullFace(GL10.GL_BACK); // which one should NOT be drawn
initTriangles();
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
}
public void onDrawFrame(GL10 gl) {
gl.glPushMatrix();
gl.glClearColor(0, 0, 0, 1.0f); //clipping backdrop color
// clear the color buffer to show the ClearColor we called above...
gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
// set rotation
gl.glRotatef(_angleY, 0f, 1f, 0f); //ROLL
gl.glRotatef(_angleX, 1f, 0f, 0f); //ELEVATION
gl.glRotatef(_angleZ, 0f, 0f, 1f); //AZIMUTH
//Draw our pyramid
//4 side faces
gl.glColor4f(0.5f, 0f, 0f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer0);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glColor4f(0.5f, 0.5f, 0f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer1);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glColor4f(0f, 0.5f, 0f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer2);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glColor4f(0f, 0.5f, 0.5f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer3);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
//Base face
gl.glColor4f(0f, 0f, 0.5f, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer4);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer5);
gl.glDrawElements(GL10.GL_TRIANGLES, _numVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
gl.glPopMatrix();
}
public void onSurfaceChanged(GL10 gl, int w, int h) {
gl.glViewport(0, 0, w, h);
}
}
Please note that this code does not compensate for tablet default landscape orientation, so it is only expected to work correctly on a phone (I didn't have a tablet close by to test any correction code).
You should probably try another delay rate (such as SENSOR_DELAY_GAME) and/or keep or increase the size of your circular buffer. The sensors (accelerometer, compass, etc.) on mobile devices are inherently noisy, so when I asked about a 'low pass filter', I meant: do you use more data to decrease the frequency of your app's usable updates? Your video was done inside; I would also recommend going to a place with less EM interference, such as a park, just to check that the behavior is consistent, and performing the standard compass reset action (rotate the device in a figure-8). In the end you may have to apply some heuristics to throw out the 'bad' data to make a smoother experience for the user.
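For example, registering the magnetometer listener at a different rate is a one-line change (standard Android API, using the names from the test project above):
sensorManager.registerListener(mMagnetometerListener,
    sensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD),
    SensorManager.SENSOR_DELAY_GAME);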
Well, I had exactly the same problem when retrieving the orientation. The thing is that I never got it solved (I had to put a constraint on the device's position when retrieving it), and I don't know if you'll ever be able to.
Pick up a magnetic compass and try to get a north reading when the compass is in the situation you describe; you will get the same nonsensical results. So you can't really expect the device's compass to do any better!
A few words about filtering, with your permission.
I would suggest averaging the Magnetic Field Vector itself before turning it into angles.
It is wrong to do averaging/smoothing on angles alone, without some sort of magnitude. Angles by themselves do not provide enough data to determine a direction/heading/bearing.
Example: when you want to know the average wind direction over a whole day, you must use the strength of the wind, not only the angles. If you average only the angles, you will get an absolutely wrong wind direction.
As for bearing direction, I would use the speed as the magnitude.
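A minimal sketch of the idea (my own illustration, not the poster's code): accumulate the sine and cosine components of each heading, optionally weighted by a magnitude, and only convert back to an angle at the end, so the ±180° wraparound cannot corrupt the mean:
// Average headings (in degrees) via their vector components.
static float averageHeadingDegrees(float[] headings) {
    double sumSin = 0, sumCos = 0;
    for (float h : headings) {
        double r = Math.toRadians(h);
        sumSin += Math.sin(r); // multiply by a magnitude here if one is available
        sumCos += Math.cos(r);
    }
    return (float) Math.toDegrees(Math.atan2(sumSin, sumCos));
}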

Android: OpenGL ES2 Texture not working

UPDATE: I got rid of the line GLES20.glEnable(GLES20.GL_TEXTURE_2D); but the line GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, 256, 256, 0, GLES20.GL_RGB, GLES20.GL_BYTE, ByteBuffer.wrap(pixels)); gives GL_INVALID_ENUM... The pixel buffer length is 196608.
Project files: http://godofgod.co.uk/my_files/NightCamPrj.zip
I am trying to get camera data into an OpenGL ES2 shader. The camera stuff appears to work, but I cannot get the texture to work even when I try my own values; I get a black screen. Here is the code:
package com.matthewmitchell.nightcam;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.Scanner;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.content.res.AssetManager;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
public class MyRenderer implements GLSurfaceView.Renderer{
private FloatBuffer vertices;
private FloatBuffer texcoords;
private int mProgram;
private int maPositionHandle;
private int gvTexCoordHandle;
private int gvSamplerHandle;
private static Context context;
int[] camera_texture;
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
initShapes();
GLES20.glClearColor(0.0f, 1.0f, 0.2f, 1.0f);
Debug.out("Hello init.");
//Shaders
int vertexShader = 0;
int fragmentShader = 0;
try {
vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, readFile("vertex.vsh"));
fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, readFile("fragment.fsh"));
} catch (IOException e) {
Debug.out("The shaders could not be found.");
e.printStackTrace();
}
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
Debug.out("VS LOG: " + GLES20.glGetShaderInfoLog(vertexShader));
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
Debug.out("FS LOG: " + GLES20.glGetShaderInfoLog(fragmentShader));
GLES20.glLinkProgram(mProgram); // creates OpenGL program executables
Debug.out("PROG LOG: " + GLES20.glGetProgramInfoLog(mProgram));
// get handles
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
gvTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "a_texCoord");
gvSamplerHandle = GLES20.glGetAttribLocation(mProgram, "s_texture");
camera_texture = null;
GLES20.glEnable(GLES20.GL_TEXTURE_2D);
}
private void initShapes(){
float triangleCoords[] = {
// X, Y, Z
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
-1.0f, 1.0f, 0.0f,
1.0f, 1.0f, 0.0f,
};
float texcoordf[] = {
// X, Y, Z
-1.0f,-1.0f,
1.0f,-1.0f,
-1.0f,1.0f,
1.0f,1.0f,
}; //Even if wrong way around it should produce a texture with these coordinates on the screen.
// initialize vertex Buffer for vertices
ByteBuffer vbb = ByteBuffer.allocateDirect(triangleCoords.length * 4);
vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
vertices = vbb.asFloatBuffer(); // create a floating point buffer from the ByteBuffer
vertices.put(triangleCoords); // add the coordinates to the FloatBuffer
vertices.position(0); // set the buffer to read the first coordinate
// initialize vertex Buffer for texcoords
vbb = ByteBuffer.allocateDirect(texcoordf.length * 4);
vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
texcoords = vbb.asFloatBuffer(); // create a floating point buffer from the ByteBuffer
texcoords.put(texcoordf); // add the coordinates to the FloatBuffer
texcoords.position(0); // set the buffer to read the first coordinate
}
private static String readFile(String path) throws IOException {
//Load file from assets folder using context given by the activity class
AssetManager assetManager = context.getAssets();
InputStream stream = assetManager.open(path);
try {
return new Scanner(stream).useDelimiter("\\A").next();
}
finally {
stream.close();
}
}
private int loadShader(int type, String shaderCode){
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
public void onDrawFrame(GL10 unused) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
if(camera_texture == null){
return;
}
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// Prepare the triangle data
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 0, vertices);
GLES20.glVertexAttribPointer(gvTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, texcoords);
GLES20.glEnableVertexAttribArray(maPositionHandle);
GLES20.glEnableVertexAttribArray(gvTexCoordHandle);
//Bind texture
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, camera_texture[0]);
GLES20.glUniform1i(gvSamplerHandle, 0);
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
//Disable arrays
GLES20.glDisableVertexAttribArray(maPositionHandle);
GLES20.glDisableVertexAttribArray(gvTexCoordHandle);
}
public void onSurfaceChanged(GL10 unused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
}
public void takeContext(Context mcontext) {
context = mcontext;
}
void bindCameraTexture(byte[] data,int w,int h) {
//Takes pixel data from camera and makes texture
byte[] pixels = new byte[256*256*3]; //Testing simple 256x256 texture. Will update for camera resolution
for(int x = 0;x < 256;x++){
for(int y = 0;y < 256;y++){
//Ignore camera data, use test values.
pixels[(x*256+y)*3] = 0;
pixels[(x*256+y)*3+1] = 100;
pixels[(x*256+y)*3+2] = 120;
}
}
//Debug.out("PX:" + pixels[0] + " " + pixels[1] + " " + pixels[2]);
//Make new texture for new data
if (camera_texture == null){
camera_texture = new int[1];
}else{
GLES20.glDeleteTextures(1, camera_texture, 0);
}
GLES20.glGenTextures(1, camera_texture, 0);
int tex = camera_texture[0];
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, 256, 256, 0, GLES20.GL_RGB, GLES20.GL_BYTE, ByteBuffer.wrap(pixels));
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
}
}
Here is the vertex shader code:
attribute vec4 vPosition;
attribute vec2 a_texCoord;
varying vec2 v_texCoord;
void main(){
gl_Position = vPosition;
v_texCoord = a_texCoord;
}
Here is the fragment shader code:
precision mediump float;
varying vec2 v_texCoord;
uniform sampler2D s_texture;
void main(){
gl_FragColor = texture2D(s_texture, v_texCoord);
}
We can ignore the camera stuff here because I'm using test values with a simple 256x256 test texture. I've done everything I've seen in examples.
Why is it black, and how can I make it show?
I see that you're using glGetAttribLocation() to retrieve the location of s_texture. This is a uniform variable, not an attribute. Try using glGetUniformLocation() instead for this one.
I don't know if this will solve all of your problems, but it's something that needs to be done for sure.
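Concretely, the lookup would change to:
gvSamplerHandle = GLES20.glGetUniformLocation(mProgram, "s_texture");
As a hedged aside on the GL_INVALID_ENUM mentioned in the update: OpenGL ES 2.0's glTexImage2D only accepts unsigned component types for GL_RGB data (GL_UNSIGNED_BYTE or GL_UNSIGNED_SHORT_5_6_5), so the GL_BYTE in that call would need to become GL_UNSIGNED_BYTE.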
It is not visible from your code, but it seems to me that you're not calling bindCameraTexture from a place that has a current rendering context (you should do that in onSurfaceCreated or onSurfaceChanged).
I finished a sample with the camera preview as the texture. The key differences from your code are the following (a sketch follows the list):
I use SurfaceTexture to connect the camera preview to the texture used in OpenGL ES.
I use the matrix generated by SurfaceTexture to adjust the output of the camera preview; otherwise there is a black flickering area.
I do not call glBindTexture() explicitly on the texture used for the camera preview.
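A minimal sketch of that approach (my own illustration; the camera and field names are hypothetical, while SurfaceTexture and the external-OES texture target are standard Android APIs):
// One-time setup, on the GL thread (e.g. in onSurfaceCreated):
int[] tex = new int[1];
GLES20.glGenTextures(1, tex, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
SurfaceTexture surfaceTexture = new SurfaceTexture(tex[0]);
camera.setPreviewTexture(surfaceTexture); // instead of a preview SurfaceHolder; may throw IOException
camera.startPreview();
// Every frame, on the GL thread (e.g. in onDrawFrame):
float[] texMatrix = new float[16];
surfaceTexture.updateTexImage();              // latch the newest camera frame
surfaceTexture.getTransformMatrix(texMatrix); // the adjustment matrix mentioned above
Note that the fragment shader then samples a samplerExternalOES rather than a plain sampler2D.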
Good Luck.

Spotlight not working

I'm trying to get a spotlight working, but it's not showing up. Could someone have a look and see where I've gone wrong?
package water3;
import Common.TextureReader;
import com.sun.opengl.util.BufferUtil;
import java.io.IOException;
import javax.media.opengl.glu.GLUquadric;
import javax.media.opengl.GL;
import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.GLEventListener;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import javax.media.opengl.glu.GLU;
class Renderer implements GLEventListener {
private GLUquadric quadric;
private Object3D object3D;
private float[] LightPos = {0.0f, 5.0f, -4.0f, 1.0f}; // Light Position
private boolean LightUp, LightDown, LightLeft, LightRight, LightForward, LightBackward;
private int[] textures = new int[3]; // Storage For 3 Textures
double aNum = 1;
private boolean aDown =false;
private boolean up =false;
private GLU glu = new GLU();
public void init(GLAutoDrawable drawable) {
GL gl = drawable.getGL();
try {
loadGLTextures(drawable);
} catch (IOException e) {
System.out.println("Couldn't load model/Texture");
throw new RuntimeException(e);
}
/*
gl.glShadeModel(GL.GL_SMOOTH); // Enable Smooth Shading
gl.glClearColor(0.0f, 0.0f, 0.0f, 0.5f); // Black Background
gl.glClearDepth(1.0f); // Depth Buffer Setup
gl.glClearStencil(0); // Stencil Buffer Setup
gl.glEnable(GL.GL_DEPTH_TEST); // Enables Depth Testing
gl.glDepthFunc(GL.GL_LEQUAL); // The Type Of Depth Testing To Do
gl.glHint(GL.GL_PERSPECTIVE_CORRECTION_HINT, GL.GL_NICEST); // Really Nice Perspective Calculations
/* gl.glLightfv(GL.GL_LIGHT1, GL.GL_POSITION, LightPos, 0); // Set Light1 Position
gl.glLightfv(GL.GL_LIGHT1, GL.GL_AMBIENT, LightAmb, 0); // Set Light1 Ambience
gl.glLightfv(GL.GL_LIGHT1, GL.GL_DIFFUSE, LightDif, 0); // Set Light1 Diffuse
gl.glLightfv(GL.GL_LIGHT1, GL.GL_SPECULAR, LightSpc, 0); // Set Light1 Specular
gl.glEnable(GL.GL_LIGHT1);
*/// Enable Light1
// Enable Lighting
/*
gl.glMaterialfv(GL.GL_FRONT, GL.GL_AMBIENT, MatAmb, 0); // Set Material Ambience
gl.glMaterialfv(GL.GL_FRONT, GL.GL_DIFFUSE, MatDif, 0); // Set Material Diffuse
gl.glMaterialfv(GL.GL_FRONT, GL.GL_SPECULAR, MatSpc, 0); // Set Material Specular
gl.glMaterialfv(GL.GL_FRONT, GL.GL_SHININESS, MatShn, 0); // Set Material Shininess
*/
gl.glClearColor(0,0,0,1);
gl.glEnable(GL.GL_LIGHT0);
gl.glEnable(GL.GL_DEPTH_TEST);
gl.glShadeModel(GL.GL_SMOOTH);
gl.glLightModeli(GL.GL_LIGHT_MODEL_TWO_SIDE, GL.GL_TRUE);
gl.glCullFace(GL.GL_BACK); // Set Culling Face To Back Face
gl.glEnable(GL.GL_CULL_FACE); // Enable Culling
gl.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // Set Clear Color (Greenish Color)
quadric = glu.gluNewQuadric(); // Initialize Quadratic
glu.gluQuadricNormals(quadric, GL.GL_SMOOTH); // Enable Smooth Normal Generation
glu.gluQuadricTexture(quadric, false);
}
public void display(GLAutoDrawable drawable) {
GL gl = drawable.getGL();
// Clear Color Buffer, Depth Buffer, Stencil Buffer
gl.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT | GL.GL_STENCIL_BUFFER_BIT);
gl.glLoadIdentity();
SetSpotlight(gl);
SetFloorMaterial(gl);
gl.glPushMatrix();
// Reset Modelview Matrix
gl.glTranslatef(0.0f, 0.0f, -20.0f); // Zoom Into The Screen 20 Units
gl.glEnable(GL.GL_TEXTURE_2D); // Enable Texture Mapping ( NEW )
drawRoom(gl); // Draw The Room
gl.glPopMatrix();
gl.glFlush(); // Flush The OpenGL Pipeline
}
private void drawRoom(GL gl) { // Draw The Room (Box)
gl.glBegin(GL.GL_QUADS); // Begin Drawing Quads
// Floor
gl.glNormal3f(0.0f, 1.0f, 0.0f); // Normal Pointing Up
gl.glVertex3f(-20.0f, -20.0f, -40.0f); // Back Left
gl.glVertex3f(-20.0f, -20.0f, 40.0f); // Front Left
gl.glVertex3f(20.0f, -20.0f, 40.0f); // Front Right
gl.glVertex3f(20.0f, -20.0f, -40.0f); // Back Right
// Ceiling
gl.glNormal3f(0.0f, -1.0f, 0.0f); // Normal Point Down
gl.glVertex3f(-10.0f, 10.0f, 20.0f); // Front Left
gl.glVertex3f(-10.0f, 10.0f, -20.0f); // Back Left
gl.glVertex3f(10.0f, 10.0f, -20.0f); // Back Right
gl.glVertex3f(10.0f, 10.0f, 20.0f); // Front Right
// Back Wall
gl.glNormal3f(0.0f, 0.0f, -1.0f); // Normal Pointing Towards Viewer
gl.glVertex3f(20.0f, 20.0f, 30.0f); // Top Right
gl.glVertex3f(20.0f, -20.0f, 30.0f); // Bottom Right
gl.glVertex3f(-20.0f, -20.0f, 30.0f); // Bottom Left
gl.glVertex3f(-20.0f, 20.0f, 30.0f); // Top Left
// Left Wall
gl.glNormal3f(1.0f, 0.0f, 0.0f); // Normal Pointing Right
gl.glVertex3f(-20.0f, 20.0f, 30.0f); // Top Front
gl.glVertex3f(-20.0f, -20.0f, 30.0f); // Bottom Front
gl.glVertex3f(-20.0f, -20.0f, -30.0f); // Bottom Back
gl.glVertex3f(-20.0f, 20.0f, -30.0f); // Top Back
// Right Wall
gl.glNormal3f(-1.0f, 0.0f, 0.0f); // Normal Pointing Left
gl.glVertex3f(20.0f, 20.0f, -30.0f); // Top Back
gl.glVertex3f(20.0f, -20.0f, -30.0f); // Bottom Back
gl.glVertex3f(20.0f, -20.0f, 30.0f); // Bottom Front
gl.glVertex3f(20.0f, 20.0f, 30.0f); // Top Front
gl.glPushMatrix();
// Front Wall
gl.glNormal3f(0.0f, 0.0f, 1.0f); // Normal Pointing Away From Viewer
gl.glTexCoord2f(1,1);
gl.glVertex3f(-20.0f, 20.0f, -30.0f); // Top Left
gl.glTexCoord2f(1,0);
gl.glVertex3f(-20.0f, -20.0f, -30.0f); // Bottom Left
gl.glTexCoord2f(0,0);
gl.glVertex3f(20.0f, -20.0f, -30.0f); // Bottom Right
gl.glTexCoord2f(0,1);
gl.glVertex3f(20.0f, 20.0f, -30.0f); // Top Right
gl.glPopMatrix();
gl.glEnd(); // Done Drawing Quads
}
public void reshape(GLAutoDrawable drawable,int xstart,int ystart,int width,int height) {
GL gl = drawable.getGL();
height = (height == 0) ? 1 : height;
gl.glViewport(0, 0, width, height);
gl.glMatrixMode(GL.GL_PROJECTION);
gl.glLoadIdentity();
gl.glRotatef(90, 0.0f, 0.0f, 1.0f);
glu.gluPerspective(60, (float) width / height, 1, 1000);
glu.gluLookAt(1.0f,0.0f,25.0f,
0.0f,0.0f,0.0f,
0.0f,0.0f,1.0f);
gl.glMatrixMode(GL.GL_MODELVIEW);
gl.glLoadIdentity();
}
public void displayChanged(GLAutoDrawable drawable, boolean modeChanged,boolean deviceChanged) { }
private void loadGLTextures(GLAutoDrawable gldrawable) throws IOException {
TextureReader.Texture texture = null;
texture = TextureReader.readTexture("data/images/04.bmp");
GL gl = gldrawable.getGL();
//Create Nearest Filtered Texture
gl.glGenTextures(1, textures, 0);
gl.glBindTexture(GL.GL_TEXTURE_2D, textures[0]);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR);
gl.glTexImage2D(GL.GL_TEXTURE_2D,
0,
3,
texture.getWidth(),
texture.getHeight(),
0,
GL.GL_RGB,
GL.GL_UNSIGNED_BYTE,
texture.getPixels());
}
public void SetSpotlight(GL gl){
gl.glDisable(GL.GL_LIGHTING);
// prepare spotlight
float spot_ambient[] = {50.2f,0.0f,0.0f,1.0f };//white ={10.2f,10.2f,10.2f,1.0f };
float spot_diffuse[] = {50.8f,0.0f,0.0f,1.0f };
float spot_specular[] = {50.8f,0.0f,0.0f,1.0f };
// set colors here and do the geometry in draw
gl.glLightfv(GL.GL_LIGHT0, GL.GL_AMBIENT, spot_ambient,0);
gl.glLightfv(GL.GL_LIGHT0, GL.GL_DIFFUSE, spot_diffuse,0);
gl.glLightfv(GL.GL_LIGHT0, GL.GL_SPECULAR, spot_specular,0);
gl.glEnable(GL.GL_LIGHTING);
gl.glEnable(GL.GL_LIGHT0);
// set light position
// since the light follows the model when mousing
// spotlight as it moves with the scene
float spot_position[] = {0.0f,60.0f,-30.0f,1.0f};
float spot_direction[] = {0.0f,0.0f,-1.0f};
float spot_angle = 10.0f;
gl.glLightfv(GL.GL_LIGHT0, GL.GL_POSITION, spot_position,0);
gl.glLightfv(GL.GL_LIGHT0, GL.GL_SPOT_DIRECTION,spot_direction,0);
gl.glLightf(GL.GL_LIGHT0, GL.GL_SPOT_CUTOFF,(float)spot_angle);
// "smoothing" the border of the lightcone
// change this for effect
gl.glMaterialfv(GL.GL_FRONT, GL.GL_AMBIENT_AND_DIFFUSE, new float[] {0.7f,0.7f,1}, 0 );
gl.glLighti(GL.GL_LIGHT0, GL.GL_SPOT_EXPONENT, 20);
gl.glEnable(GL.GL_LIGHTING);
}
public void SetFloorMaterial(GL gl){
float amb[]={0.3f,0.3f,0.0f,1.0f};
float diff[]={1.0f,1.0f,0.5f,1.0f};
float spec[]={0.6f,0.6f,0.5f,1.0f};
float shine=0.25f;
gl.glMaterialfv(GL.GL_FRONT,GL.GL_AMBIENT,amb,0);
gl.glMaterialfv(GL.GL_FRONT,GL.GL_DIFFUSE,diff,0);
gl.glMaterialfv(GL.GL_FRONT,GL.GL_SPECULAR,spec,0);
gl.glMaterialf(GL.GL_FRONT,GL.GL_SHININESS,shine*128.0f);
}
}
OpenGL's fixed-function pipeline does illumination only at the vertices and then just interpolates the result across each polygon. Since the walls of your room are just very large quads, every wall has only 4 vertices, so only 4 lighting calculations happen per wall, and a narrow spotlight cone falls between them. For a spotlight effect you need many more lighting calculations, so you have to refine (i.e. tessellate) the walls' meshes, or use per-pixel lighting (i.e. do the lighting calculations at the fragment level, which is done using shaders).
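For illustration, here is a sketch (not from the original post) of tessellating the floor quad into an n-by-n grid so the fixed-function spotlight has enough vertices to produce a visible cone; the coordinates match the floor in drawRoom:
private void drawFloorTessellated(GL gl, int n) {
    float x0 = -20f, z0 = -40f;
    float dx = 40f / n, dz = 80f / n; // floor spans x in [-20,20], z in [-40,40]
    gl.glNormal3f(0.0f, 1.0f, 0.0f);  // normal pointing up, as before
    gl.glBegin(GL.GL_QUADS);
    for (int i = 0; i < n; i++) {
        for (int j = 0; j < n; j++) {
            float x = x0 + i * dx, z = z0 + j * dz;
            gl.glVertex3f(x,      -20.0f, z);      // same winding as the original floor quad
            gl.glVertex3f(x,      -20.0f, z + dz);
            gl.glVertex3f(x + dx, -20.0f, z + dz);
            gl.glVertex3f(x + dx, -20.0f, z);
        }
    }
    gl.glEnd();
}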
