OpenGL ES Texture not rendering correctly - java

I am working on an Android app with OpenGL ES 3.0 and I want to create an OBJ loader system and display a 3D model with its texture.
My code correctly displays a 3D mesh without a texture.
If I try to add a texture, it displays the texture, but some parts of the mesh show up as empty triangles.
Example :
my meshe with a texture
my meshe obj file
my texture png file
I can not find what is the problem.
My fragment shader
// Fragment shader: samples the diffuse texture at the interpolated UV.
precision mediump float;

uniform vec4 vColor;          // tint colour supplied by the host app (currently unused)
uniform sampler2D uTexture;   // diffuse texture bound to texture unit 0
varying vec2 oTexCoordinate;  // UV interpolated from the vertex stage

void main() {
    vec4 texel = texture2D(uTexture, oTexCoordinate);
    gl_FragColor = texel;
}
My vertex shader
// Vertex shader: applies the combined model-view-projection matrix and
// forwards the per-vertex UV to the fragment stage.
attribute vec4 position;       // object-space vertex position
attribute vec2 vTexCoordinate; // per-vertex UV coordinate
uniform mat4 matrix;           // combined MVP matrix
varying vec2 oTexCoordinate;   // UV handed to the fragment shader

void main() {
    gl_Position = matrix * position;
    oTexCoordinate = vTexCoordinate;
}
Thanks in advance.
Update :
Thanks.
I have changed my code to fit your idea: I have activated face culling and added a buffer for the normals, though I do not use it for now. I put my new code below.
But the problem is not entirely resolved — see the picture below.
I think I need to pass the normal information to the shaders, but I am not sure how to do it properly or whether that is the solution.
/**
 * Loads a Wavefront OBJ model (positions, texture coordinates, normals) from a
 * raw resource, de-indexes the faces into flat per-triangle attribute streams,
 * and renders them with OpenGL ES 3.0 via glDrawArrays (no index buffer).
 *
 * NOTE(review): assumes faces are triangles or quads in "v", "v/vt/vn" or plain
 * index form — confirm against the shipped .obj assets.
 */
public class MeshLoader {

    private int program;

    // One "f"-derived line per triangle, split per attribute kind.
    private List<String> facesVertexList;
    private List<String> facesTextureList;
    private List<String> facesNormalList;

    // Raw "v ", "vt " and "vn " lines from the OBJ file.
    private List<String> verticesList;
    private List<String> textureList;
    private List<String> normalList;

    // De-indexed attribute streams handed to glVertexAttribPointer.
    private FloatBuffer verticesBuffer;
    private FloatBuffer verticesBufferTemp;
    private FloatBuffer facesVertexBuffer;
    private FloatBuffer facesTextureBuffer;
    private FloatBuffer facesNormalBuffer;
    private FloatBuffer textureBuffer;
    private FloatBuffer textureBufferTemp;
    private FloatBuffer normalBuffer;
    private FloatBuffer normalBufferTemp;

    private Context contextMeshLoader;

    final int[] textureHandle = new int[1];

    /**
     * Reads the OBJ model and both shader sources, then compiles and links the
     * GL program. Must run on a thread with a current GL context.
     *
     * @param context Android context used to open raw resources.
     */
    public MeshLoader(Context context) {
        contextMeshLoader = context;
        textureList = new LinkedList<>();
        verticesList = new LinkedList<>();
        normalList = new LinkedList<>();
        facesVertexList = new LinkedList<>();
        facesTextureList = new LinkedList<>();
        facesNormalList = new LinkedList<>();
        openObjFile(0);

        String vertexShaderCode = "";
        try {
            InputStream vertexShaderStream =
                    context.getResources().openRawResource(R.raw.vertex_shader);
            vertexShaderCode = IOUtils.toString(vertexShaderStream, Charset.defaultCharset());
            vertexShaderStream.close();
        } catch (Exception e) {
            Log.e("MeshReaderActivity", "Error reading vertex shader", e);
        }

        String fragmentShaderCode = "";
        try {
            InputStream fragmentShaderStream =
                    context.getResources().openRawResource(R.raw.fragment_shader);
            fragmentShaderCode = IOUtils.toString(fragmentShaderStream, Charset.defaultCharset());
            fragmentShaderStream.close();
        } catch (Exception e) {
            Log.e("MeshReaderActivity", "Error reading fragment shader", e);
        }

        int vertexShader = GLES30.glCreateShader(GLES30.GL_VERTEX_SHADER);
        GLES30.glShaderSource(vertexShader, vertexShaderCode);
        int fragmentShader = GLES30.glCreateShader(GLES30.GL_FRAGMENT_SHADER);
        GLES30.glShaderSource(fragmentShader, fragmentShaderCode);
        GLES30.glCompileShader(vertexShader);
        GLES30.glCompileShader(fragmentShader);
        program = GLES30.glCreateProgram();
        GLES30.glAttachShader(program, vertexShader);
        GLES30.glAttachShader(program, fragmentShader);
        GLES30.glLinkProgram(program);
        GLES30.glUseProgram(program);
    }

    /**
     * Loads one of the bundled OBJ models and rebuilds every GL-ready buffer.
     *
     * @param value 0 selects the face model, any other value the ship model.
     */
    public void openObjFile(int value) {
        // BUG FIX: the original overwrote the parameter with 0 before testing
        // it, so the ship model could never be selected.
        InputStream is;
        if (value == 0)
            is = contextMeshLoader.getResources().openRawResource(R.raw.objface);
        else
            is = contextMeshLoader.getResources().openRawResource(R.raw.objship);

        // Reset ALL parse state so reloading a model starts clean.
        // BUG FIX: the original left the normal and face-texture/-normal lists
        // populated from the previous load.
        if (verticesBufferTemp != null) verticesBufferTemp.clear();
        if (facesVertexBuffer != null) facesVertexBuffer.clear();
        if (textureBuffer != null) textureBuffer.clear();
        if (verticesList != null) verticesList.clear();
        if (textureList != null) textureList.clear();
        if (normalList != null) normalList.clear();
        if (facesVertexList != null) facesVertexList.clear();
        if (facesTextureList != null) facesTextureList.clear();
        if (facesNormalList != null) facesNormalList.clear();

        try {
            byte[] buffer = new byte[is.available()];
            is.read(buffer);
            parseData(new String(buffer));

            // One float per index, three indices per triangle.
            ByteBuffer buffer2 = ByteBuffer.allocateDirect(facesVertexList.size() * 3 * 4);
            buffer2.order(ByteOrder.nativeOrder());
            facesVertexBuffer = buffer2.asFloatBuffer();
            ByteBuffer buffer3 = ByteBuffer.allocateDirect(facesTextureList.size() * 3 * 4);
            buffer3.order(ByteOrder.nativeOrder());
            facesTextureBuffer = buffer3.asFloatBuffer();
            // BUG FIX: size by facesNormalList (the original used facesTextureList).
            ByteBuffer buffer6 = ByteBuffer.allocateDirect(facesNormalList.size() * 3 * 4);
            buffer6.order(ByteOrder.nativeOrder());
            facesNormalBuffer = buffer6.asFloatBuffer();

            // OBJ indices are 1-based; store them 0-based.
            for (String face : facesVertexList) {
                String vertexIndices[] = face.split("\\s+");
                facesVertexBuffer.put(Float.parseFloat(vertexIndices[1]) - 1);
                facesVertexBuffer.put(Float.parseFloat(vertexIndices[2]) - 1);
                facesVertexBuffer.put(Float.parseFloat(vertexIndices[3]) - 1);
            }
            facesVertexBuffer.position(0);

            for (String texture : facesTextureList) {
                String textureIndice[] = texture.split("\\s+");
                facesTextureBuffer.put(Float.parseFloat(textureIndice[1]) - 1);
                facesTextureBuffer.put(Float.parseFloat(textureIndice[2]) - 1);
                facesTextureBuffer.put(Float.parseFloat(textureIndice[3]) - 1);
            }
            facesTextureBuffer.position(0);

            for (String normal : facesNormalList) {
                String normalIndice[] = normal.split("\\s+");
                facesNormalBuffer.put(Float.parseFloat(normalIndice[1]) - 1);
                facesNormalBuffer.put(Float.parseFloat(normalIndice[2]) - 1);
                facesNormalBuffer.put(Float.parseFloat(normalIndice[3]) - 1);
            }
            facesNormalBuffer.position(0);

            // Temporary, still-indexed attribute pools.
            ByteBuffer buffer1 = ByteBuffer.allocateDirect(verticesList.size() * 3 * 4);
            buffer1.order(ByteOrder.nativeOrder());
            verticesBufferTemp = buffer1.asFloatBuffer();
            ByteBuffer buffer5 = ByteBuffer.allocateDirect(textureList.size() * 2 * 4);
            buffer5.order(ByteOrder.nativeOrder());
            textureBufferTemp = buffer5.asFloatBuffer();
            // BUG FIX: size by normalList (the original used textureList).
            ByteBuffer buffer7 = ByteBuffer.allocateDirect(normalList.size() * 3 * 4);
            buffer7.order(ByteOrder.nativeOrder());
            normalBufferTemp = buffer7.asFloatBuffer();

            for (String vertex : verticesList) {
                String coords[] = vertex.split("\\s+");
                verticesBufferTemp.put(Float.parseFloat(coords[1]));
                verticesBufferTemp.put(Float.parseFloat(coords[2]));
                verticesBufferTemp.put(Float.parseFloat(coords[3]));
            }
            verticesBufferTemp.position(0);

            for (String texture : textureList) {
                String textureIndices[] = texture.split("\\s+");
                textureBufferTemp.put(Float.parseFloat(textureIndices[1]));
                textureBufferTemp.put(Float.parseFloat(textureIndices[2]));
            }
            textureBufferTemp.position(0);

            // BUG FIX: normals have THREE components; the original stored only
            // two, so every read at faceNormal*3+2 below was out of sync.
            for (String normal : normalList) {
                String normalIndices[] = normal.split("\\s+");
                normalBufferTemp.put(Float.parseFloat(normalIndices[1]));
                normalBufferTemp.put(Float.parseFloat(normalIndices[2]));
                normalBufferTemp.put(Float.parseFloat(normalIndices[3]));
            }
            normalBufferTemp.position(0);

            System.out.println("size remaining " + facesVertexBuffer.remaining());

            // De-index: one position/UV/normal per face corner so the mesh can
            // be drawn with glDrawArrays and no index buffer.
            ByteBuffer bufferV = ByteBuffer.allocateDirect(facesVertexBuffer.remaining() * 3 * 4);
            bufferV.order(ByteOrder.nativeOrder());
            verticesBuffer = bufferV.asFloatBuffer();
            ByteBuffer bufferT = ByteBuffer.allocateDirect(facesVertexBuffer.remaining() * 2 * 4);
            bufferT.order(ByteOrder.nativeOrder());
            textureBuffer = bufferT.asFloatBuffer();
            ByteBuffer bufferN = ByteBuffer.allocateDirect(facesVertexBuffer.remaining() * 3 * 4);
            bufferN.order(ByteOrder.nativeOrder());
            normalBuffer = bufferN.asFloatBuffer();

            int size = facesVertexBuffer.remaining();
            for (int i = 0; i < size; i++) {
                // Indices were stored as floats; round back to int.
                int faceVertex = Math.round(facesVertexBuffer.get(i));
                int faceTexture = Math.round(facesTextureBuffer.get(i));
                int faceNormal = Math.round(facesNormalBuffer.get(i));
                verticesBuffer.put(i * 3, verticesBufferTemp.get(faceVertex * 3));
                verticesBuffer.put(i * 3 + 1, verticesBufferTemp.get(faceVertex * 3 + 1));
                verticesBuffer.put(i * 3 + 2, verticesBufferTemp.get(faceVertex * 3 + 2));
                // Flip v: OBJ uses a bottom-left UV origin, the bitmap a top-left one.
                textureBuffer.put(i * 2, textureBufferTemp.get(faceTexture * 2));
                textureBuffer.put(i * 2 + 1, -textureBufferTemp.get(faceTexture * 2 + 1));
                normalBuffer.put(i * 3, normalBufferTemp.get(faceNormal * 3));
                normalBuffer.put(i * 3 + 1, normalBufferTemp.get(faceNormal * 3 + 1));
                normalBuffer.put(i * 3 + 2, normalBufferTemp.get(faceNormal * 3 + 2));
            }
            verticesBuffer.position(0);
            textureBuffer.position(0);
            normalBuffer.position(0);
            is.close();
            loadTexture();
        } catch (Exception e) {
            Log.e("MeshReaderActivity", "Error reading objfile", e);
        }
    }

    /**
     * Splits the OBJ text into lines and dispatches each line by its prefix.
     */
    public void parseData(String dataToParse) {
        Log.i("parse data method", "parse data method");
        String[] data = dataToParse.split("\n");
        for (int i = 0; i < data.length; i++) {
            String line = data[i];
            if (line.startsWith("v ")) {
                verticesList.add(line);
            } else if (line.startsWith("vt ")) {
                textureList.add(line);
            } else if (line.startsWith("vn ")) {
                normalList.add(line);
            } else if (line.startsWith("f ")) {
                triangulate(line);
            }
        }
    }

    /**
     * Converts a face line into triangles, splitting quads with a fan.
     *
     * BUG FIXES: the original emitted (1,2,4) as the second quad triangle
     * (correct fan is (1,3,4)), and for "v/vt/vn" quads it never emitted the
     * texture/normal index lines, desynchronising the three face lists.
     */
    public void triangulate(String lineToTriangulate) {
        String lineSplit[] = lineToTriangulate.split("\\s+");
        if (lineSplit.length > 4) {
            // Quad: fan-triangulate into (1,2,3) and (1,3,4).
            if (lineToTriangulate.contains("/")) {
                addFace(lineSplit, 1, 2, 3);
                addFace(lineSplit, 1, 3, 4);
            } else {
                facesVertexList.add(lineSplit[0] + " " + lineSplit[1] + " " + lineSplit[2] + " " + lineSplit[3]);
                facesVertexList.add(lineSplit[0] + " " + lineSplit[1] + " " + lineSplit[3] + " " + lineSplit[4]);
            }
        } else {
            if (lineToTriangulate.contains("/")) {
                addFace(lineSplit, 1, 2, 3);
            } else {
                facesVertexList.add(lineToTriangulate);
            }
        }
    }

    /** Emits one triangle's vertex/texture/normal index lines from "v/vt/vn" tokens. */
    private void addFace(String[] tokens, int a, int b, int c) {
        String[] e1 = tokens[a].split("/");
        String[] e2 = tokens[b].split("/");
        String[] e3 = tokens[c].split("/");
        facesVertexList.add(tokens[0] + " " + e1[0] + " " + e2[0] + " " + e3[0]);
        if (e1.length > 1 && !e1[1].isEmpty())
            facesTextureList.add(tokens[0] + " " + e1[1] + " " + e2[1] + " " + e3[1]);
        if (e1.length > 2)
            facesNormalList.add(tokens[0] + " " + e1[2] + " " + e2[2] + " " + e3[2]);
    }

    /**
     * Draws the de-indexed mesh.
     *
     * @param scratch model/rotation matrix supplied by the renderer.
     * @param zoom    camera distance along +Z.
     */
    public void draw(float scratch[], float zoom) {
        int position = GLES30.glGetAttribLocation(program, "position");
        GLES30.glEnableVertexAttribArray(position);
        GLES30.glVertexAttribPointer(position, 3, GLES30.GL_FLOAT, false, 3 * 4, verticesBuffer);

        int mTextureUniformHandle = GLES30.glGetUniformLocation(program, "uTexture");
        int mTextureCoordinateHandle = GLES30.glGetAttribLocation(program, "vTexCoordinate");
        GLES30.glActiveTexture(GLES30.GL_TEXTURE0);
        GLES30.glUniform1i(mTextureUniformHandle, 0);
        GLES30.glEnableVertexAttribArray(mTextureCoordinateHandle);
        GLES30.glVertexAttribPointer(mTextureCoordinateHandle, 2, GLES30.GL_FLOAT, false, 2 * 4, textureBuffer);

        int normalHandle = GLES30.glGetAttribLocation(program, "normal");
        GLES30.glEnableVertexAttribArray(normalHandle);
        GLES30.glVertexAttribPointer(normalHandle, 3, GLES30.GL_FLOAT, false, 3 * 4, normalBuffer);

        // MVP = frustum * lookAt * scratch, then flip the model 180° around Y.
        float[] projectionMatrix = new float[16];
        float[] viewMatrix = new float[16];
        float[] productMatrix = new float[16];
        Matrix.frustumM(projectionMatrix, 0, -1, 1, -1, 1, 1, 11);
        Matrix.setLookAtM(viewMatrix, 0, 0, 0, zoom, 0, 0, 0, 0, 1, 0);
        Matrix.multiplyMM(productMatrix, 0, projectionMatrix, 0, viewMatrix, 0);
        float[] finalMatrix = new float[16];
        Matrix.multiplyMM(finalMatrix, 0, productMatrix, 0, scratch, 0);
        Matrix.rotateM(finalMatrix, 0, 180, 0.0f, 1.0f, 0.0f);

        int matrix = GLES30.glGetUniformLocation(program, "matrix");
        // BUG FIX: the original also called glUniform1i(matrix, 0), which is
        // invalid on a mat4 uniform and raises GL_INVALID_OPERATION.
        GLES30.glUniformMatrix4fv(matrix, 1, false, finalMatrix, 0);

        int size = facesVertexBuffer.remaining();
        GLES30.glEnable(GLES30.GL_CULL_FACE);
        GLES30.glCullFace(GLES30.GL_BACK);
        // BUG FIX: without depth testing, back triangles overdraw front ones —
        // the "empty triangles" artefact described in the question.
        GLES30.glEnable(GLES30.GL_DEPTH_TEST);
        GLES30.glDrawArrays(GLES30.GL_TRIANGLES, 0, size);

        GLES30.glDisableVertexAttribArray(position);
        GLES30.glDisableVertexAttribArray(mTextureCoordinateHandle);
        GLES30.glDisableVertexAttribArray(normalHandle);
    }

    /**
     * Decodes the bundled PNG and uploads it as the mesh's 2D texture.
     */
    public void loadTexture() {
        GLES30.glGenTextures(1, textureHandle, 0);
        if (textureHandle[0] == 0) {
            throw new RuntimeException("Error generating texture name.");
        }
        final BitmapFactory.Options options = new BitmapFactory.Options();
        // BUG FIX: "no pre-scaling" requires inScaled = false; the original set
        // true, letting Android rescale the bitmap by screen density.
        options.inScaled = false; // No pre-scaling
        Bitmap bitmap = BitmapFactory.decodeResource(contextMeshLoader.getResources(), R.raw.pngface, options);
        GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, textureHandle[0]);
        GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_LINEAR);
        GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_LINEAR);
        GLUtils.texImage2D(GLES30.GL_TEXTURE_2D, 0, bitmap, 0);
        bitmap.recycle();
    }
}
Thanks again.
Update
For the float, it was an error of copy/paste.
For my problem of display I have find the solution.
I just need to add in the onDrawFrame method.
GLES30.glEnable(GLES30.GL_DEPTH_TEST);
Now my mesh and its texture are correctly displayed.
Thanks for your help.

Your assumption is wrong. The same vertex coordinate can be associated with different texture coordinates.
In the following code you create a new texture coordinate array that has as many items as the array of vertex coordinates. That would only work if each vertex coordinate were associated with exactly 1 texture coordinate. In the file there are 7536 texture coordinates and 7366 vertex coordinates.
// NOTE(review): this is the flawed version discussed above, kept for
// illustration — it writes each texture coordinate into a slot addressed by
// the VERTEX index, i.e. it assumes a 1:1 vertex-to-UV mapping. Since the OBJ
// file has 7536 texture coordinates for 7366 vertices, several faces write
// different UVs to the same vertex slot and the last writer wins.
public void parseTexture()
{
int size = facesVertexBuffer.remaining();
System.out.println("size " + size);
for(int i = 0; i < size;i++)
{
int faceVertex = facesVertexBuffer.get(i);
int faceTexture = facesTextureBuffer.get(i);
// Fetch the UV for this face corner (v is negated to flip the bitmap's Y).
float a = textureBufferTemp.get((faceTexture)*2);
float b = -textureBufferTemp.get(((faceTexture)*2)+1);
// BUG (by design of this example): indexed by faceVertex, not by corner.
textureBuffer.put((faceVertex*2),a);
textureBuffer.put(((faceVertex)*2)+1,b);
}
textureBuffer.position(0);
System.out.println("end parse texture");
}
If there are different indices for vertex coordinates and texture coordinates, then vertex positions have to be "duplicated".
The vertex coordinate and its attributes (like the texture coordinate) form a data record. You can imagine a 3D vertex coordinate and a 2D texture coordinate as a single 5D coordinate.
See Rendering meshes with multiple indices.
The vertex attributes for each vertex position form a set of data. This means you have to create tuples of vertex coordinates and texture coordinates.
Let's assume that you have a .obj file like this:
v -1 -1 -1
v 1 -1 -1
v -1 1 -1
v 1 1 -1
v -1 -1 1
v 1 -1 1
v -1 1 1
v 1 1 1
vt 0 0
vt 0 1
vt 1 0
vt 1 1
vn -1 0 0
vn 0 -1 0
vn 0 0 -1
vn 1 0 0
vn 0 1 0
vn 0 0 1
f 3/1/1 1/2/1 5/4/1 7/3/1
f 1/1/2 2/2/2 3/4/2 6/3/2
f 3/1/3 4/2/3 2/4/3 1/3/3
f 2/1/4 4/2/4 8/4/4 6/3/4
f 4/1/5 3/2/5 7/4/5 8/3/5
f 5/1/6 6/2/6 8/4/6 7/3/6
From this you have to find all the combinations of vertex coordinate, texture texture coordinate and normal vector indices, which are used in the face specification:
0 : 3/1/1
1 : 1/2/1
2 : 5/4/1
3 : 7/3/1
4 : 1/1/2
5 : 2/2/2
6 : 3/4/2
7 : 6/3/2
8 : ...
Then you have to create a vertex coordinate, texture coordinate and normal vector array corresponding to the array of combinations indices.
The vertex coordinates and its attributes can either be combined in one array to data sets, or to three arrays with equal number of attributes:
index vx vy vz u v nx ny nz
0 : -1 1 -1 0 0 -1 0 0
1 : -1 -1 -1 0 1 -1 0 0
2 : -1 -1 1 1 1 -1 0 0
3 : -1 1 1 1 0 -1 0 0
4 : -1 -1 -1 0 0 0 -1 0
5 : 1 -1 -1 0 1 0 -1 0
6 : -1 1 -1 1 1 0 -1 0
7 : 1 -1 1 1 0 0 -1 0
8 : ...
Further be aware that the data type short, which is used for the indices in your application, has a range of [-32768, 32767]. This may be large enough for this model, but the number of indices of larger models will exceed this limit.
The easiest workaround, is, to create an array of triangle primitives. Completely skip the index buffer and use GLES30.glDrawArrays() to draw the mesh.
Read the vertex coordinates and texture coordinates into temporary buffers.
// Read the vertex and texture coordinates into temporary buffers.
// BUG FIX: a vertex has THREE components (x, y, z), so the buffer must be
// verticesList.size() * 3 * 4 bytes — the original allocated * 2 * 4, which
// overflows while putting the vertices.
ByteBuffer bufferVTemp = ByteBuffer.allocateDirect(verticesList.size() * 3 * 4);
bufferVTemp.order(ByteOrder.nativeOrder());
verticesBufferTemp = bufferVTemp.asFloatBuffer();
ByteBuffer bufferTTemp = ByteBuffer.allocateDirect(textureList.size() * 2 * 4);
bufferTTemp.order(ByteOrder.nativeOrder());
textureBufferTemp = bufferTTemp.asFloatBuffer();
for (String vertex : verticesList) {
    // Split on any whitespace run, consistent with the texture loop below
    // (a single-space split breaks on files with double spaces or tabs).
    String coords[] = vertex.split("\\s+");
    verticesBufferTemp.put(Float.parseFloat(coords[1])); // x
    verticesBufferTemp.put(Float.parseFloat(coords[2])); // y
    verticesBufferTemp.put(Float.parseFloat(coords[3])); // z
}
verticesBufferTemp.position(0);
for (String texture : textureList) {
    String textureIndices[] = texture.split("\\s+");
    textureBufferTemp.put(Float.parseFloat(textureIndices[1])); // u
    textureBufferTemp.put(Float.parseFloat(textureIndices[2])); // v
}
textureBufferTemp.position(0);
Then create an array of triangles:
// Expand the indexed faces into flat per-triangle attribute arrays.
// BUG FIXES vs. the original snippet:
//  - a FloatBuffer has no size(); use remaining() to size the target buffers;
//  - the vertex fetch must use a stride of 3 floats (x, y, z), not 2;
//  - z must be read at offset +2 (the original read offset +1 twice, so z == y);
//  - "verticesBuffer.put( i*3+2), z)" was a syntax error (stray parenthesis).
ByteBuffer bufferV = ByteBuffer.allocateDirect(facesVertexBuffer.remaining() * 3 * 4);
bufferV.order(ByteOrder.nativeOrder());
verticesBuffer = bufferV.asFloatBuffer();
ByteBuffer bufferT = ByteBuffer.allocateDirect(facesVertexBuffer.remaining() * 2 * 4);
bufferT.order(ByteOrder.nativeOrder());
textureBuffer = bufferT.asFloatBuffer();
int size = facesVertexBuffer.remaining();
System.out.println("size " + size);
for (int i = 0; i < size; i++) {
    int faceVertex = facesVertexBuffer.get(i);
    int faceTexture = facesTextureBuffer.get(i);
    float x = verticesBufferTemp.get(faceVertex * 3);
    float y = verticesBufferTemp.get(faceVertex * 3 + 1);
    float z = verticesBufferTemp.get(faceVertex * 3 + 2);
    verticesBuffer.put(i * 3, x);
    verticesBuffer.put(i * 3 + 1, y);
    verticesBuffer.put(i * 3 + 2, z);
    float u = textureBufferTemp.get(faceTexture * 2);
    // Negate v to flip between the OBJ's bottom-left and the bitmap's top-left origin.
    float v = -textureBufferTemp.get(faceTexture * 2 + 1);
    textureBuffer.put(i * 2, u);
    textureBuffer.put(i * 2 + 1, v);
}
verticesBuffer.position(0);
textureBuffer.position(0);
Draw the mesh by GLES30.glDrawArrays():
GLES30.glDrawElements(GLES30.GL_TRIANGLES, 0, facesVertexBuffer.size());

Related

Understanding stride and offset when interleaving attributes in a vertex buffer

I don't seem to be able to wrap my head around interleaving vertex attributes.
I'm trying to pass 3 attributes to my compute shader: position, velocity, and the number of times it has bounced off something. The code worked just fine before I added the nbr_bounce attribute. Now the data seems to not be aligned as I'm imagining it.
Creates the interleaved list of floats:
class ParticleSystem {
FloatList particleAttribList = new FloatList();
float[] particlesBuffer;
FloatBuffer fbParticles;
int numOfParticles;
ShaderProgram shaderProgram;
ComputeProgram computeProgram;
/**
 * Builds the interleaved attribute stream — x, y, vx, vy, bounce-count
 * (5 floats per particle) — and hands it to the shader programs.
 */
ParticleSystem(int count) {
    numOfParticles = count;
    for (int n = 0; n < count; n++) {
        Particle particle = new Particle();
        particle.pos.x = random(-1, 1);
        particle.pos.y = random(-1, 1);
        particle.vel.x = 0.01;
        particle.vel.y = 0.01;
        // Interleave: position, velocity, then bounce counter.
        particleAttribList.append(particle.pos.x);
        particleAttribList.append(particle.pos.y);
        particleAttribList.append(particle.vel.x);
        particleAttribList.append(particle.vel.y);
        particleAttribList.append(particle.nbr_bounces);
    }
    // Flatten the list into a plain float[] before wrapping it in a direct buffer.
    int total = particleAttribList.size();
    particlesBuffer = new float[total];
    for (int idx = 0; idx < total; idx++) {
        particlesBuffer[idx] = particleAttribList.get(idx);
    }
    fbParticles = Buffers.newDirectFloatBuffer(particlesBuffer);
    shaderProgram = new ShaderProgram(gl, "vert.glsl", "frag.glsl");
    computeProgram = new ComputeProgram(gl, "comp.glsl", fbParticles);
}
Passes the list of floats to the Shader:
// Compiles the compute shader, uploads the interleaved particle data, and
// binds it both as vertex attributes and as SSBO binding point 0.
//
// NOTE(review): the CPU side assumes a packed 20-byte record (vec2 + vec2 +
// float). Under std430 a struct whose largest member is a vec2 is padded to a
// multiple of 8 bytes, so the GPU-side stride is likely 24, not 20 — this
// would produce exactly the misalignment described in the question. Confirm
// against the std430 layout rules (or add an explicit padding float).
ComputeProgram(GL4 gl, String compute, FloatBuffer verticesFB) {
this.gl = gl;
//Load and Compile the Compute Shader
int compute_shader = shaderHelper.createAndCompileShader(gl, GL4.GL_COMPUTE_SHADER, compute);
compute_program = gl.glCreateProgram();
gl.glAttachShader(compute_program, compute_shader);
gl.glLinkProgram(compute_program);
// Upload the interleaved particle records; limit()*4 converts floats to bytes.
gl.glGenBuffers(1, vbo, 0);
gl.glBindBuffer(GL4.GL_ARRAY_BUFFER, vbo[0]);
gl.glBufferData(GL4.GL_ARRAY_BUFFER, verticesFB.limit()*4, verticesFB, GL4.GL_DYNAMIC_DRAW);
gl.glEnableVertexAttribArray(0);
gl.glEnableVertexAttribArray(1);
gl.glEnableVertexAttribArray(2);
// Since the Particle struct has 2 vec2 variables + 1 int, then the stride is 16 + 4 = 20
// position attribute (no offset)
gl.glVertexAttribPointer(0, 2, GL4.GL_FLOAT, false, 20, 0);
// velocity attribute (with (2*4)=8 offset)
gl.glVertexAttribPointer(1, 2, GL4.GL_FLOAT, false, 20, 8);
// nbr_bounces (with (2*4 + 2*4)=16 offset)
gl.glVertexAttribPointer(2, 1, GL4.GL_FLOAT, false, 20, 16);
// The same buffer object doubles as the compute shader's SSBO (binding = 0).
ssbo = vbo[0];
gl.glBindBufferBase(GL4.GL_SHADER_STORAGE_BUFFER, 0, ssbo);
}
Compute Shader:
#version 430
// Advances each particle one step and reflects its velocity at the
// [-1, 1] walls, counting bounces.
//
// NOTE(review): under std430, this struct (vec2 + vec2 + float) is padded to a
// multiple of the vec2 alignment (8 bytes), so each array element likely
// occupies 24 bytes on the GPU while the CPU uploads packed 20-byte records —
// verify the stride, or append an explicit padding float to the struct.
struct Particle{
vec2 pos;
vec2 vel;
float nbr_bounces;
};
layout(std430, binding = 0) buffer particlesBuffer
{
Particle particles[];
};
layout(local_size_x = 1024, local_size_y = 1, local_size_z = 1) in;
void main()
{
uint i = gl_GlobalInvocationID.x;
// Look one step ahead so the bounce happens before leaving the box.
vec2 tmpPos = particles[i].pos + particles[i].vel;
if(abs(tmpPos.x) >= 1.0){
particles[i].vel.x = -1.0 * particles[i].vel.x;
particles[i].nbr_bounces += 1.0;
}
if(abs(tmpPos.y) >= 1.0){
particles[i].vel.y = -1.0 * particles[i].vel.y;
particles[i].nbr_bounces += 1.0;
}
particles[i].pos += particles[i].vel;
}

Android chart with variable line stroke width

I am trying to create Android App with chart with variable line stroke width like this one in the link.
I have tried to realize it with MPAndroidChart. Override drawCubicBezier method in LineChartRenderer class and to set dynamically setStrokeWidth on Paint. But nothing happens. Probably my knowledge is not enough to do it.
Can you help me how to achieve a different line thickness or if there is another chart library that has such functionality?
This is how I set my new Renderer
chart2.setRenderer(new myLineChartRenderer(chart2, chart2.getAnimator(), chart2.getViewPortHandler()));
This is the class
public class myLineChartRenderer extends LineChartRenderer {
public myLineChartRenderer(LineChart chart, ChartAnimator animator, ViewPortHandler viewPortHandler) {
super(chart, animator, viewPortHandler);
}
#Override
protected void drawCubicBezier(ILineDataSet dataSet) {
Path mPath = new Path();
ArrayList<Path> mPaths = new ArrayList<Path>();
mRenderPaint.setAntiAlias(true);
mRenderPaint.setDither(true);
mRenderPaint.setStyle(Paint.Style.STROKE);
mRenderPaint.setStrokeJoin(Paint.Join.ROUND);
mRenderPaint.setStrokeCap(Paint.Cap.ROUND);
//mRenderPaint.setStrokeWidth(STROKE_WIDTH);
float phaseY = mAnimator.getPhaseY();
Transformer trans = mChart.getTransformer(dataSet.getAxisDependency());
mXBounds.set(mChart, dataSet);
float intensity = dataSet.getCubicIntensity();
cubicPath.reset();
if (mXBounds.range >= 1) {
float prevDx = 0f;
float prevDy = 0f;
float curDx = 0f;
float curDy = 0f;
// Take an extra point from the left, and an extra from the right.
// That's because we need 4 points for a cubic bezier (cubic=4), otherwise we get lines moving and doing weird stuff on the edges of the chart.
// So in the starting `prev` and `cur`, go -2, -1
// And in the `lastIndex`, add +1
final int firstIndex = mXBounds.min + 1;
final int lastIndex = mXBounds.min + mXBounds.range;
Entry prevPrev;
Entry prev = dataSet.getEntryForIndex(Math.max(firstIndex - 2, 0));
Entry cur = dataSet.getEntryForIndex(Math.max(firstIndex - 1, 0));
Entry next = cur;
int nextIndex = -1;
if (cur == null) return;
// let the spline start
cubicPath.moveTo(cur.getX(), cur.getY() * phaseY);
mPath.moveTo(cur.getX(), cur.getY() * phaseY);
for (int j = mXBounds.min + 1; j <= mXBounds.range + mXBounds.min; j++) {
prevPrev = prev;
prev = cur;
cur = nextIndex == j ? next : dataSet.getEntryForIndex(j);
nextIndex = j + 1 < dataSet.getEntryCount() ? j + 1 : j;
next = dataSet.getEntryForIndex(nextIndex);
prevDx = (cur.getX() - prevPrev.getX()) * intensity;
prevDy = (cur.getY() - prevPrev.getY()) * intensity;
curDx = (next.getX() - prev.getX()) * intensity;
curDy = (next.getY() - prev.getY()) * intensity;
cubicPath.cubicTo(prev.getX() + prevDx, (prev.getY() + prevDy) * phaseY,
cur.getX() - curDx,
(cur.getY() - curDy) * phaseY, cur.getX(), cur.getY() * phaseY);
mPath.cubicTo(prev.getX() + prevDx, (prev.getY() + prevDy) * phaseY,
cur.getX() - curDx,
(cur.getY() - curDy) * phaseY, cur.getX(), cur.getY() * phaseY);
mPaths.add(mPath);
// Log.i("Curve", (prev.getX() + prevDx)+" | "+ ((prev.getY() + prevDy) * phaseY)+" | "+ (cur.getX() - curDx)+" | "+ ((cur.getY() - curDy) * phaseY)+" | "+ cur.getX()+" | "+ (cur.getY() * phaseY));
}
}
// if filled is enabled, close the path
if (dataSet.isDrawFilledEnabled()) {
cubicFillPath.reset();
cubicFillPath.addPath(cubicPath);
//drawCubicFill(mBitmapCanvas, dataSet, cubicFillPath, trans, mXBounds);
}
mRenderPaint.setColor(dataSet.getColor());
mRenderPaint.setStyle(Paint.Style.STROKE);
trans.pathValueToPixel(cubicPath);
//mBitmapCanvas.drawPath(cubicPath, mRenderPaint);
for(int i=0; i<mPaths.size();i++)
{
mRenderPaint.setStrokeWidth(i+30);
mBitmapCanvas.drawPath(mPaths.get(i), mRenderPaint);
}
mRenderPaint.setPathEffect(null);
}
}
I tried to divide the path into separate parts and change the thickness of the line while drawing them, but the result is totally wrong.

WorldWind Java Google Earth Like Zoom

I've created an input handler for NASA Worldwind that I'm trying to replicate Google Earth like zooming with.
I'm trying to make zoom towards the mouse cursor, instead of the center of the screen (like it does by default).
I've got it somewhat working -- except it doesn't zoom towards the lat/long under the cursor consistently, it seems to drift too far. What I want to happen is that the same lat/long is held under the cursor during the duration of the zoom. So, for instance, if you are hovering the cursor over a particular landmark (like a body of water), it will stay under the cursor as the wheel is scrolled.
The code I'm using is based heavily on this: https://forum.worldwindcentral.com/forum/world-wind-java-forums/development-help/11977-zoom-at-mouse-cursor?p=104793#post104793
Here is my Input Handler:
import java.awt.event.MouseWheelEvent;
import gov.nasa.worldwind.awt.AbstractViewInputHandler;
import gov.nasa.worldwind.awt.ViewInputAttributes;
import gov.nasa.worldwind.geom.Position;
import gov.nasa.worldwind.geom.Vec4;
import gov.nasa.worldwind.view.orbit.BasicOrbitView;
import gov.nasa.worldwind.view.orbit.OrbitViewInputHandler;
public class ZoomToCursorViewInputHandler extends OrbitViewInputHandler {
protected class ZoomActionHandler extends VertTransMouseWheelActionListener {
#Override
public boolean inputActionPerformed(AbstractViewInputHandler inputHandler, MouseWheelEvent mouseWheelEvent,
ViewInputAttributes.ActionAttributes viewAction) {
double zoomInput = mouseWheelEvent.getWheelRotation();
Position position = getView().computePositionFromScreenPoint(mousePoint.x, mousePoint.y);
// Zoom toward the cursor if we're zooming in. Move straight out when zooming
// out.
if (zoomInput < 0 && position != null)
return this.zoomToPosition(position, zoomInput, viewAction);
else
return super.inputActionPerformed(inputHandler, mouseWheelEvent, viewAction);
}
protected boolean zoomToPosition(Position position, double zoomInput,
ViewInputAttributes.ActionAttributes viewAction) {
double zoomChange = zoomInput * getScaleValueZoom(viewAction);
BasicOrbitView view = (BasicOrbitView) getView();
System.out.println("================================");
System.out.println("Center Position: \t\t"+view.getCenterPosition());
System.out.println("Mouse is on Position: \t\t"+position);
Vec4 centerVector = view.getCenterPoint();
Vec4 cursorVector = view.getGlobe().computePointFromLocation(position);
Vec4 delta = cursorVector.subtract3(centerVector);
delta = delta.multiply3(-zoomChange);
centerVector = centerVector.add3(delta);
Position newPosition = view.getGlobe().computePositionFromPoint(centerVector);
System.out.println("New Center Position is: \t"+newPosition);
setCenterPosition(view, uiAnimControl, newPosition, viewAction);
onVerticalTranslate(zoomChange, viewAction);
return true;
}
}
public ZoomToCursorViewInputHandler() {
ViewInputAttributes.ActionAttributes actionAttrs = this.getAttributes()
.getActionMap(ViewInputAttributes.DEVICE_MOUSE_WHEEL)
.getActionAttributes(ViewInputAttributes.VIEW_VERTICAL_TRANSLATE);
actionAttrs.setMouseActionListener(new ZoomActionHandler());
}
}
To enable, set this property in the worldwind.xml to point to this class:
<Property name="gov.nasa.worldwind.avkey.ViewInputHandlerClassName"
value="gov.nasa.worldwindx.examples.ZoomToCursorViewInputHandler"/>
After some thinking over this problem I believe there is no closed form analytical solution for it. You just have to take into account to many things: shape of the Earth, how the "eye" moves when you move the center. So the best trick I think you can do is to "follow" the main "zoom" animation and do small adjustments after each animation step. As animation steps are small, calculation errors should also be smaller and they should accumulate less because on next step you take into account all the previous errors. So my idea in the code is roughly following: create a FixZoomPositionAnimator class as
static class FixZoomPositionAnimator extends BasicAnimator
{
static final String VIEW_ANIM_KEY = "FixZoomPositionAnimator";
static final double EPS = 0.005;
private final java.awt.Point mouseControlPoint;
private final Position mouseGeoLocation;
private final Vec4 mouseGeoPoint;
private final BasicOrbitView orbitView;
private final Animator zoomAnimator;
private int lastDxSign = 0;
private int lastDySign = 0;
int stepNumber = 0;
int stepsNoAdjustments = 0;
FixZoomPositionAnimator(BasicOrbitView orbitView, Animator zoomAnimator, java.awt.Point mouseControlPoint, Position mouseGeoLocation)
{
this.orbitView = orbitView;
this.zoomAnimator = zoomAnimator;
this.mouseControlPoint = mouseControlPoint;
this.mouseGeoLocation = mouseGeoLocation;
mouseGeoPoint = orbitView.getGlobe().computePointFromLocation(mouseGeoLocation);
}
public Point getMouseControlPoint()
{
return mouseControlPoint;
}
public Position getMouseGeoLocation()
{
return mouseGeoLocation;
}
private static int sign(double d)
{
if (Math.abs(d) < EPS)
return 0;
else if (d > 0)
return 1;
else
return -1;
}
double calcAccelerationK(double dSign, double lastDSign)
{
// as we are following zoom trying to catch up - accelerate adjustment
// but slow down if we overshot the target last time
if (!zoomAnimator.hasNext())
return 1.0;
else if (dSign != lastDSign)
return 0.5;
else
{
// reduce acceleration over time
if (stepNumber < 10)
return 5;
else if (stepNumber < 20)
return 3;
else
return 2;
}
}
static boolean isBetween(double checkedValue, double target1, double target2)
{
return ((target1 < checkedValue) && (checkedValue < target2))
|| ((target1 > checkedValue) && (checkedValue > target2));
}
static boolean isValid(Position position)
{
return isBetween(position.longitude.degrees, -180, 180)
&& isBetween(position.latitude.degrees, -90, 90);
}
#Override
public void next()
{
// super.next(); // do not call super to avoid NullPointerException!
nextWithTilt(); // works OK on tilted Earth
// nextOld(); // IMHO better looking but stops working is user tilts the Earth
}
/**
 * One adjustment step that assumes latitude/longitude lines run (locally)
 * parallel to the screen Y/X axes - i.e. the globe is not tilted. It measures
 * how far the anchored geographic point drifted from the mouse point on screen
 * and nudges the view center (with acceleration) to compensate.
 * Kept for reference; next() uses nextWithTilt() instead.
 */
private void nextOld()
{
stepNumber++;
Vec4 curProjection = orbitView.project(mouseGeoPoint);
Rectangle viewport = orbitView.getViewport();
// for Y sign is inverted
double dX = (mouseControlPoint.x - curProjection.x);
double dY = (mouseControlPoint.y + curProjection.y - viewport.getHeight());
if (Math.abs(dX) > EPS || Math.abs(dY) > EPS)
{
// offset of the mouse point from the viewport center (screen space)
double dCX = (mouseControlPoint.x - viewport.getCenterX());
double dCY = (mouseControlPoint.y + viewport.getCenterY() - viewport.getHeight());
final double stepPx = 10;
// As the Earth is curved and we are not guaranteed to have a frontal view on it
// latitude an longitude lines are not really parallel to X or Y. But we assume that
// locally they are parallel enough both around the mousePoint and around the center.
// So we use reference points near center to calculate how we want to move the center.
Vec4 controlPointRight = new Vec4(viewport.getCenterX() + stepPx, viewport.getCenterY());
Vec4 geoPointRight = orbitView.unProject(controlPointRight);
Position positionRight = (geoPointRight != null) ? orbitView.getGlobe().computePositionFromPoint(geoPointRight) : null;
Vec4 controlPointUp = new Vec4(viewport.getCenterX(), viewport.getCenterY() - stepPx);
Vec4 geoPointUp = orbitView.unProject(controlPointUp);
Position positionUp = (geoPointUp != null) ? orbitView.getGlobe().computePositionFromPoint(geoPointUp) : null;
Position centerPosition = orbitView.getCenterPosition();
// --- longitude adjustment (driven by the X delta) ---
double newCenterLongDeg;
if (Math.abs(dCX) <= 1.0) // same X => same longitude
{
newCenterLongDeg = mouseGeoLocation.longitude.degrees;
}
else if (positionRight == null) // if controlPointRight is outside of the globe - don't try to fix this coordinate
{
newCenterLongDeg = centerPosition.longitude.degrees;
}
else
{
double scaleX = -dX / stepPx;
// apply acceleration if possible
int dXSign = sign(dX);
double accScaleX = scaleX * calcAccelerationK(dXSign, lastDxSign);
lastDxSign = dXSign;
// linear interpolation/extrapolation between center and the right reference point
newCenterLongDeg = centerPosition.longitude.degrees * (1 - accScaleX) + positionRight.longitude.degrees * accScaleX;
// if we overshot - use non-accelerated mode
if (!isBetween(newCenterLongDeg, centerPosition.longitude.degrees, mouseGeoLocation.longitude.degrees)
|| !isBetween(newCenterLongDeg, -180, 180))
{
newCenterLongDeg = centerPosition.longitude.degrees * (1 - scaleX) + positionRight.longitude.degrees * scaleX;
}
}
// --- latitude adjustment (driven by the Y delta), mirrors the longitude logic ---
double newCenterLatDeg;
if (Math.abs(dCY) <= 1.0) // same Y => same latitude
{
newCenterLatDeg = mouseGeoLocation.latitude.degrees;
}
else if (positionUp == null) // if controlPointUp is outside of the globe - don't try to fix this coordinate
{
newCenterLatDeg = centerPosition.latitude.degrees;
}
else
{
double scaleY = -dY / stepPx;
// apply acceleration if possible
int dYSign = sign(dY);
double accScaleY = scaleY * calcAccelerationK(dYSign, lastDySign);
lastDySign = dYSign;
newCenterLatDeg = centerPosition.latitude.degrees * (1 - accScaleY) + positionUp.latitude.degrees * accScaleY;
// if we overshot - use non-accelerated mode
if (!isBetween(newCenterLatDeg, centerPosition.latitude.degrees, mouseGeoLocation.latitude.degrees)
|| !isBetween(newCenterLatDeg, -90, 90))
{
newCenterLatDeg = centerPosition.latitude.degrees * (1 - scaleY) + positionUp.latitude.degrees * scaleY;
}
}
Position newCenterPosition = Position.fromDegrees(newCenterLatDeg, newCenterLongDeg);
orbitView.setCenterPosition(newCenterPosition);
}
if (!zoomAnimator.hasNext())
stop();
}
/**
 * One adjustment step that also works when the globe is tilted: instead of
 * assuming lat/long lines are parallel to screen X/Y, it projects two
 * reference points (center + ~10px right, center + ~10px up) and solves for
 * how much latitude/longitude change is needed to cancel the on-screen drift
 * of the anchored point. Still contains verbose debug logging.
 */
private void nextWithTilt()
{
stepNumber++;
if (!zoomAnimator.hasNext() || (stepsNoAdjustments > 20))
{
System.out.println("Stop after " + stepNumber);
stop();
}
// NOTE(review): there is no return after stop() above, so the rest of this
// step still executes once after stopping - confirm this is intended.
Vec4 curProjection = orbitView.project(mouseGeoPoint);
Rectangle viewport = orbitView.getViewport();
System.out.println("----------------------------------");
System.out.println("Mouse: mouseControlPoint = " + mouseControlPoint + "\t location = " + mouseGeoLocation + "\t viewSize = " + viewport);
System.out.println("Mouse: curProjection = " + curProjection);
double dX = (mouseControlPoint.x - curProjection.x);
double dY = (viewport.getHeight() - mouseControlPoint.y - curProjection.y); // Y is inverted
Vec4 dTgt = new Vec4(dX, dY);
// sometimes if you zoom close to the edge curProjection is calculated as somewhere
// way beyond where it is and it leads to overflow. This is a protection against it
if (Math.abs(dX) > viewport.width / 4 || Math.abs(dY) > viewport.height / 4)
{
Vec4 unproject = orbitView.unProject(new Vec4(mouseControlPoint.x, viewport.getHeight() - mouseControlPoint.y));
System.out.println("!!!End Mouse:"
+ " dX = " + dX + "\t" + " dY = " + dY
+ "\n" + "unprojectPt = " + unproject
+ "\n" + "unprojectPos = " + orbitView.getGlobe().computePositionFromPoint(unproject)
);
stepsNoAdjustments += 1;
return;
}
// drift small enough - count it as "no adjustment" toward the stop condition
if (Math.abs(dX) <= EPS && Math.abs(dY) <= EPS)
{
stepsNoAdjustments += 1;
System.out.println("Mouse: No adjustment: " + " dX = " + dX + "\t" + " dY = " + dY);
return;
}
else
{
stepsNoAdjustments = 0;
}
// create reference points about 10px away from the center to the Up and to the Right
// and then map them to screen coordinates and geo coordinates
// Unfortunately unproject often generates points far from the Earth surface (and
// thus with significantly less difference in lat/long)
// So this longer but more fool-proof calculation is used
final double stepPx = 10;
Position centerPosition = orbitView.getCenterPosition();
Position eyePosition = orbitView.getEyePosition();
// approximate world-space size of one pixel at the center's distance from the eye
double pixelGeoSize = orbitView.computePixelSizeAtDistance(eyePosition.elevation - centerPosition.elevation);
Vec4 geoCenterPoint = orbitView.getCenterPoint();
Vec4 geoRightPoint = geoCenterPoint.add3(new Vec4(pixelGeoSize * stepPx, 0, 0));
Vec4 geoUpPoint = geoCenterPoint.add3(new Vec4(0, pixelGeoSize * stepPx, 0));
Position geoRightPosition = orbitView.getGlobe().computePositionFromPoint(geoRightPoint);
Position geoUpPosition = orbitView.getGlobe().computePositionFromPoint(geoUpPoint);
Vec4 controlCenter = orbitView.project(geoCenterPoint);
Vec4 controlRight = orbitView.project(geoRightPoint);
Vec4 controlUp = orbitView.project(geoUpPoint);
Vec4 controlRightDif = controlRight.subtract3(controlCenter);
controlRightDif = new Vec4(controlRightDif.x, controlRightDif.y); // ignore z for scale calculation
Vec4 controlUpDif = controlUp.subtract3(controlCenter);
controlUpDif = new Vec4(controlUpDif.x, controlUpDif.y); // ignore z for scale calculation
// decompose the screen-space drift along the (possibly tilted) right/up screen axes
double scaleRight = -dTgt.dot3(controlRightDif) / controlRightDif.getLengthSquared3();
double scaleUp = -dTgt.dot3(controlUpDif) / controlUpDif.getLengthSquared3();
Position posRightDif = geoRightPosition.subtract(centerPosition);
Position posUpDif = geoUpPosition.subtract(centerPosition);
// combine the geographic deltas of both reference directions
double totalLatDifDeg = posRightDif.latitude.degrees * scaleRight + posUpDif.latitude.degrees * scaleUp;
double totalLongDifDeg = posRightDif.longitude.degrees * scaleRight + posUpDif.longitude.degrees * scaleUp;
Position totalDif = Position.fromDegrees(totalLatDifDeg, totalLongDifDeg);
// don't copy elevation!
Position newCenterPosition = Position.fromDegrees(centerPosition.latitude.degrees + totalLatDifDeg,
centerPosition.longitude.degrees + totalLongDifDeg);
// if we overshot - try to slow down
if (!isValid(newCenterPosition))
{
newCenterPosition = Position.fromDegrees(centerPosition.latitude.degrees + totalLatDifDeg / 2,
centerPosition.longitude.degrees + totalLongDifDeg / 2);
if (!isValid(newCenterPosition))
{
System.out.println("Too much overshot: " + newCenterPosition);
stepsNoAdjustments += 1;
return;
}
}
System.out.println("Mouse:"
+ " dX = " + dX + "\t" + " dY = " + dY
+ "\n"
+ " centerPosition = " + centerPosition
+ "\n"
+ " geoUpPoint = " + geoUpPoint + "\t " + " geoUpPosition = " + geoUpPosition
+ "\n"
+ " geoRightPoint = " + geoRightPoint + "\t " + " geoRightPosition = " + geoRightPosition
+ "\n"
+ " posRightDif = " + posRightDif
+ "\t"
+ " posUpDif = " + posUpDif
+ "\n"
+ " scaleRight = " + scaleRight + "\t" + " scaleUp = " + scaleUp);
System.out.println("Mouse: oldCenterPosition = " + centerPosition);
System.out.println("Mouse: newCenterPosition = " + newCenterPosition);
orbitView.setCenterPosition(newCenterPosition);
}
Update
FixZoomPositionAnimator was updated to take into account the fact that on a large scale you can't assume that longitude and latitude lines run parallel to X and Y. To work around this, reference points near the center are used to calculate the adjustment. This means that the code will not work if the globe size is less than about 20px (2*stepPx) or if the user has tilted the Earth, making latitude/longitude significantly non-parallel to X/Y.
End of Update
Update #2
I've moved the previous logic to nextOld and added nextWithTilt. The new function should work even if the world is tilted, but as the base logic is more complicated now, there is no acceleration anymore, which IMHO makes it a bit worse for more typical cases. Also, there is still a lot of logging inside nextWithTilt because I'm not quite sure it really works OK. Use at your own risk.
End of Update #2
and then you may use it as
public class ZoomToCursorViewInputHandler extends OrbitViewInputHandler
{
public ZoomToCursorViewInputHandler()
{
ViewInputAttributes.ActionAttributes actionAttrs = this.getAttributes()
.getActionMap(ViewInputAttributes.DEVICE_MOUSE_WHEEL)
.getActionAttributes(ViewInputAttributes.VIEW_VERTICAL_TRANSLATE);
actionAttrs.setMouseActionListener(new ZoomActionHandler());
}
protected class ZoomActionHandler extends VertTransMouseWheelActionListener
{
#Override
public boolean inputActionPerformed(AbstractViewInputHandler inputHandler, MouseWheelEvent mouseWheelEvent,
final ViewInputAttributes.ActionAttributes viewAction)
{
double zoomInput = mouseWheelEvent.getWheelRotation();
Position position = wwd.getCurrentPosition();
Point mouseControlPoint = mouseWheelEvent.getPoint();
// Zoom toward the cursor if we're zooming in. Move straight out when zooming
// out.
if (zoomInput < 0 && position != null)
{
boolean res = super.inputActionPerformed(inputHandler, mouseWheelEvent, viewAction);
BasicOrbitView view = (BasicOrbitView) getView();
OrbitViewMoveToZoomAnimator zoomAnimator = (OrbitViewMoveToZoomAnimator) uiAnimControl.get(VIEW_ANIM_ZOOM);
// for continuous scroll preserve the original target if mouse was not moved
FixZoomPositionAnimator old = (FixZoomPositionAnimator) uiAnimControl.get(FixZoomPositionAnimator.VIEW_ANIM_KEY);
if (old != null && old.getMouseControlPoint().equals(mouseControlPoint))
{
position = old.getMouseGeoLocation();
}
FixZoomPositionAnimator fixZoomPositionAnimator = new FixZoomPositionAnimator(view, zoomAnimator, mouseControlPoint, position);
fixZoomPositionAnimator.start();
uiAnimControl.put(FixZoomPositionAnimator.VIEW_ANIM_KEY, fixZoomPositionAnimator);
return res;
}
else
{
uiAnimControl.remove(FixZoomPositionAnimator.VIEW_ANIM_KEY); // when zoom direction changes we don't want to make position adjustments anymore
return super.inputActionPerformed(inputHandler, mouseWheelEvent, viewAction);
}
}
}
// here goes aforementioned FixZoomPositionAnimator
}

Cube texturing in opengl3

Just doing my computer graphics assignment - put a texture (600x400 bitmap with different numbers) on a cube to form a proper die. I managed to do it using "classical" texture mapping: creating vertices and adding corresponding texture coordinates to them:
// Write cursor into the points/colors/tex_coord arrays filled by quad().
int arrayindex = 0;
// Current atlas cell: [xpos..xposEnd] x [ypos..yposEnd] in normalized texture
// coordinates of the 600x400 bitmap (3 columns x 2 rows of die faces).
float xpos = 0.0f;
float xposEnd = 0.32f;
float ypos = 0.0f;
float yposEnd = 0.49f;
// Faces emitted so far; after 3 faces the cursor moves to the second atlas row.
int count = 0;
/**
 * Emits one cube face (quad a-b-c-d) as two triangles (a,b,c) and (a,c,d)
 * into points/colors/tex_coord, mapping the face corners onto the current
 * texture-atlas cell, then advances the atlas cursor to the next cell.
 * Stateful: depends on arrayindex/xpos/ypos/count set by previous calls.
 */
void quad( int a, int b, int c, int d ) {
// triangle 1: a, b, c
colors[arrayindex] = vertex_colors[a];
points[arrayindex] = vertices[a];
tex_coord[arrayindex] = new Point2(xpos, ypos);
arrayindex++;
colors[arrayindex] = vertex_colors[b];
points[arrayindex] = vertices[b];
tex_coord[arrayindex] = new Point2(xpos, yposEnd);
arrayindex++;
colors[arrayindex] = vertex_colors[c];
points[arrayindex] = vertices[c];
tex_coord[arrayindex] = new Point2(xposEnd, yposEnd);
arrayindex++;
// triangle 2: a, c, d (shares the a and c corners with triangle 1)
colors[arrayindex] = vertex_colors[a];
points[arrayindex] = vertices[a];
tex_coord[arrayindex] = new Point2(xpos, ypos);
arrayindex++;
colors[arrayindex] = vertex_colors[c];
points[arrayindex] = vertices[c];
tex_coord[arrayindex] = new Point2(xposEnd, yposEnd);
arrayindex++;
colors[arrayindex] = vertex_colors[d];
points[arrayindex] = vertices[d];
tex_coord[arrayindex] = new Point2(xposEnd, ypos);
arrayindex++;
// advance the atlas cursor to the next cell in the current row
xpos = xpos + 0.34f;
xposEnd = xpos + 0.32f;
count++;
// after three faces, wrap to the first cell of the second atlas row
if (count == 3) {
xpos = 0.0f;
xposEnd = 0.33f;
ypos = 0.51f;
yposEnd = 1.0f;
}
}
/**
 * Builds the whole die: six quad() calls - the call order matters because each
 * call consumes the next cell of the texture atlas - then converts the filled
 * arrays into NIO buffers for upload.
 */
void colorcube() {
quad( 1, 0, 3, 2 );
quad( 2, 3, 7, 6 );
quad( 3, 0, 4, 7 );
quad( 6, 5, 1, 2 );
quad( 5, 4, 0, 1 );
quad( 4, 5, 6, 7 );
pointsBuf = VectorMath.toBuffer(points);
colorsBuf = VectorMath.toBuffer(colors);
texcoord = VectorMath.toBuffer(tex_coord);
}
Passing all this stuff to shaders and just putting it up together.
But reviewing the slides i noticed this method is supposed to be "pre opengl3".
Is there any other method to do this stuff?
In lecture examples i noticed putting it up together in the vertex shader but it was just for a simple 2d plane, not a 3d cube
tex_coords = vec2(vPosition.x+0.5,vPosition.z+0.5);
and later passed to fragment shader to create the texture.
But reviewing the slides i noticed this method is supposed to be "pre opengl3".
I think your slides refer to the old immediate mode. In immediate mode each vertex and its attributes are sent to OpenGL by calling functions that immediately draw them.
In your code however you're initializing a buffer with vertex data. This buffer may then be passed as a whole to OpenGL and drawn as a batch by only a single OpenGL call. I wrote "may" because there's not a single OpenGL call in your question.

collision detection doesn't push back

Alright, so I'm working on collision detection for a 3d game, this is what I got so far:
/**
 * Sphere-vs-mesh collision response: walks every triangle of the map mesh,
 * builds the triangle's axis-aligned bounding box, and when the sphere around
 * model2 overlaps that box, pushes model2 back from the box center by the
 * sphere radius along the horizontal direction toward the box center.
 *
 * NOTE(review): the push-back uses the triangle's AABB center rather than the
 * actual contact point/normal, so the response direction is only approximate.
 */
public void mapCol(Spatial map, Node model2){
Mesh m = (Mesh) ((Node) map).getChild("obj_mesh0");
int c = 0;
m.updateWorldBound(true);
boolean col = false; // only react to the first colliding triangle per call
c = m.getMeshData().getPrimitiveCount(0);
// System.out.println(c);
Vector3[][] v3 = new Vector3[c][3];
for(int s = 0; s < c; s++){
v3[s] = null;
v3[s] = m.getMeshData().getPrimitive(s, 0, v3[s]);
// component-wise min/max of the triangle's three vertices -> AABB corners
Vector3 min = new Vector3((float)Math.min((float) Math.min(v3[s][0].getXf(), v3[s][1].getXf()), v3[s][2].getXf()),
(float)Math.min((float)Math.min(v3[s][0].getYf(), v3[s][1].getYf()), v3[s][2].getYf()),
(float)Math.min((float)Math.min(v3[s][0].getZf(), v3[s][1].getZf()), v3[s][2].getZf()));
Vector3 max = new Vector3((float) Math.max((float)Math.max(v3[s][0].getXf(), v3[s][1].getXf()), v3[s][2].getXf()),
(float)Math.max((float)Math.max(v3[s][0].getYf(), v3[s][1].getYf()), v3[s][2].getYf()),
(float)Math.max((float)Math.max(v3[s][0].getZf(), v3[s][1].getZf()), v3[s][2].getZf()));
// v2 = center of the triangle's AABB
Vector3 v2 = new Vector3();
v2 = max.add(min, v2);
v2.divideLocal(2);
// sphere (center = model2 translation, radius = sp1 radius) vs AABB overlap test
if(max.getXf() > model2.getTranslation().getXf() - sp1.getRadius()&&
min.getXf() < model2.getTranslation().getXf() + sp1.getRadius() &&
max.getZf() > model2.getTranslation().getZf() - sp1.getRadius() &&
min.getZf() < model2.getTranslation().getZf() + sp1.getRadius() &&
max.getYf() > model2.getTranslation().getYf() + sp1.getRadius()&&
!col){
// NOTE(review): v2.dot(v2) is |v2|^2, not a cosine; acos of it is NaN for
// |v2| > 1, so 'angle' is likely meaningless - appears to be debug-only.
float cosine = (float) v2.dot(v2);
float angle = (float) Math.toDegrees(Math.acos( cosine ));
// pangle = horizontal bearing from model2 to the AABB center (degrees)
float pangle = (float) Math.toDegrees(Math.atan2((min.getX() + ((max.getX() - min.getX())/2)) - model2.getTranslation().getX(), (min.getZ() + ((max.getZ() - min.getZ())/2) - model2.getTranslation().getZ())));
if(min.getY() < max.getY()){
System.out.println("pangle:" + pangle + " angle:" + angle);
// place model2 one radius away from the AABB center along the bearing
model2.setTranslation(
(min.getX() + ((max.getX() - min.getX())/2)) - (Math.sin(Math.toRadians(pangle)) * (sp1.getRadius())),
model2.getTranslation().getYf(),
(min.getZ() + ((max.getZ() - min.getZ())/2)) - (-Math.cos(Math.toRadians(pangle)) * (sp1.getRadius()))
);
col = true;
}
}
}
}
Now the part to really look at is right here:
model2.setTranslation(
(min.getX() + ((max.getX() - min.getX())/2)) - (Math.sin(Math.toRadians(pangle)) * (sp1.getRadius())),
model2.getTranslation().getYf(),
(min.getZ() + ((max.getZ() - min.getZ())/2)) - (-Math.cos(Math.toRadians(pangle)) * (sp1.getRadius()))
);
Any idea why it wouldn't set model2 its radius away from the wall? (making it stop at the wall and unable to go any further)
float cosine = v2.dot(v2)
is intentional ?
Because it just gives you length of v2, squared.
Probably that should be
float cosine = velocity.dot(normalVector)/(velocity.length()*normalVector.length())
, if you wanted cosine of angle between them, but I don't fully understand your code, so I don't know.

Categories

Resources