I'm making an app that tracks exercise movements based on orientation and accelerometer readings (the exercise movements are very slow). What I have is a strategy-pattern kind of situation: an abstract class for an exercise movement, and the concrete exercise movements implement the actual logic. The problem is that I am spawning threads to track the different exercises in the onSensorChanged() method of my activity. Since this method is called a lot of times, I'm worried my code will spawn just as many threads. Do they get garbage collected?
Code:
public class WorkoutBuddy extends Activity implements SensorEventListener {
TextView t1, t2, t3, t4, t5, t6, t7;
SensorManager sensorManager;
private Sensor sensorAccelerometer;
private Sensor sensorMagneticField;
private float[] valuesAccelerometer;
private float[] valuesMagneticField;
private float[] valuesOrientation;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.exercise_buddy);
sensorManager = (SensorManager)getSystemService(SENSOR_SERVICE);
sensorAccelerometer = sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
sensorMagneticField = sensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
valuesAccelerometer = new float[3];
valuesMagneticField = new float[3];
valuesOrientation = new float[3];
matrixR = new float[9];
matrixI = new float[9];
matrixValues = new float[3];
//mediaPlayer = MediaPlayer.create(this, R.raw.first_position_confirmation);
}
@Override
protected void onPause() {
sensorManager.unregisterListener(this,sensorAccelerometer);
sensorManager.unregisterListener(this,sensorMagneticField);
super.onPause();
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
// TODO Auto-generated method stub
}
float[] orientation;
private float[] matrixR;
private float[] matrixI;
private float[] matrixValues;
@Override
public void onSensorChanged(SensorEvent event) {
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
valuesAccelerometer = lowPass(event.values.clone(), valuesAccelerometer);
} else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) {
valuesMagneticField = lowPass(event.values.clone(), valuesMagneticField);
}
if (valuesAccelerometer != null && valuesMagneticField != null) {
SensorManager.getRotationMatrix(matrixR, matrixI, valuesAccelerometer, valuesMagneticField);
if(true){
SensorManager.getOrientation(matrixR, matrixValues);
double azimuth = Math.toDegrees(matrixValues[0]);
double pitch = Math.toDegrees(matrixValues[1]);
double roll = Math.toDegrees(matrixValues[2]);
valuesOrientation[0]=(float) pitch;
valuesOrientation[1]=(float) roll;
valuesOrientation[2]=(float) azimuth;
Thread forExc1 = new Thread(new LeftShoulder(valuesAccelerometer, valuesOrientation, this));
Thread forExc2 = new Thread(new RightShoulder(valuesAccelerometer, valuesOrientation, this));
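// note: run() just executes the Runnable on the current (sensor callback) thread; start() is what would actually spawn a new thread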
forExc1.run();
forExc2.run();
}
}
}
@Override
protected void onResume() {
sensorManager.registerListener(this,sensorAccelerometer,SensorManager.SENSOR_DELAY_NORMAL);
sensorManager.registerListener(this,sensorMagneticField,SensorManager.SENSOR_DELAY_NORMAL);
super.onResume();
}
//Low pass filter used to smooth the sensor readings
protected float[] lowPass( float[] input, float[] output ) {
float ALPHA = 0.25f;
if ( output == null ) return input;
for ( int i=0; i<input.length; i++ ) {
output[i] = output[i] + ALPHA * (input[i] - output[i]);
}
return output;
}
}
package com.example.msapp2;
public abstract class ExerciseMovement implements Runnable{
protected float[] acc, ori;
protected boolean played = false;
}
package com.example.msapp2;
import android.content.Context;
import android.media.MediaPlayer;
public class LeftShoulder extends ExerciseMovement {
MediaPlayer mediaPlayer;
public LeftShoulder(float[] accelerometer, float[] orientation, Context context){
mediaPlayer = MediaPlayer.create(context, R.raw.first_position_confirmation);
acc = accelerometer;
//this.ori = orientation;
}
public void run(){
if(acc[0]> -10 && acc[0] < -8.5 && !played){
mediaPlayer.start();
played = true;
}
}
}
If you just override onSensorChanged() and output a Log.d, you'll see it's called hundreds, if not thousands, of times per second.
I suggest the opposite approach: create just one thread to process the received events in the background, then feed that thread from onSensorChanged().
Implement a kind of event queue in the thread. Assume thousands of events will arrive, constantly.
Something like:
private class ShoulderMovementProcessorThread extends Thread {
    private final BlockingQueue<float[]> queue = new LinkedBlockingQueue<float[]>();

    // this will be called from the UI thread; just add the event to the (thread-safe) queue
    public void publish(float[] valuesAccelerometer, float[] valuesOrientation) {
        queue.offer(valuesAccelerometer.clone()); // queue a copy so the UI thread can keep reusing its arrays
    }

    // this is the typical event loop: take one event from the queue, process it, then wait for the next
    public void run() {
        try {
            while (!isInterrupted()) {
                float[] values = queue.take(); // blocks until the next event arrives
                // process the event (check the exercise movement, play the confirmation sound, ...)
            }
        } catch (InterruptedException e) {
            // the thread was asked to stop
        }
    }
}
ShoulderMovementProcessorThread mShoulderProcessor=new ShoulderMovementProcessorThread(...);
@Override
public void onSensorChanged(SensorEvent event) {
decodeEvent (event); // fills up azimuth, roll, etc.
mShoulderProcessor.publish(valuesAccelerometer, valuesWhatever);
}
// decode an event
private void decodeEvent (SensorEvent event) {
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
valuesAccelerometer = lowPass(event.values.clone(), valuesAccelerometer);
} else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) {
valuesMagneticField = lowPass(event.values.clone(), valuesMagneticField);
}
if (valuesAccelerometer != null && valuesMagneticField != null) {
SensorManager.getRotationMatrix(matrixR, matrixI, valuesAccelerometer, valuesMagneticField);
if(true){
SensorManager.getOrientation(matrixR, matrixValues);
double azimuth = Math.toDegrees(matrixValues[0]);
double pitch = Math.toDegrees(matrixValues[1]);
double roll = Math.toDegrees(matrixValues[2]);
valuesOrientation[0]=(float) pitch;
valuesOrientation[1]=(float) roll;
valuesOrientation[2]=(float) azimuth;
}
}
}
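If you prefer not to manage the queue yourself, a roughly equivalent sketch can be built on Android's HandlerThread (android.os.HandlerThread / android.os.Handler). The field names mSensorWorker and mWorkerHandler below are illustrative, not from the original code; decodeEvent and the values arrays are the ones shown above.
private HandlerThread mSensorWorker;
private Handler mWorkerHandler;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // ... existing setup ...
    mSensorWorker = new HandlerThread("sensor-worker");
    mSensorWorker.start();
    mWorkerHandler = new Handler(mSensorWorker.getLooper());
}

@Override
public void onSensorChanged(SensorEvent event) {
    decodeEvent(event); // as above
    final float[] acc = valuesAccelerometer.clone();
    final float[] ori = valuesOrientation.clone();
    mWorkerHandler.post(new Runnable() {
        @Override
        public void run() {
            // evaluate the exercise movements here with acc and ori, off the UI thread
        }
    });
}

@Override
protected void onDestroy() {
    mSensorWorker.quit();
    super.onDestroy();
}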
I implemented something similar recently:
public class DBWorkerThread implements Runnable
{
private SensorEnum sensorType;
private LinkedBlockingQueue<float[]> sensorData;
private DBService dbService;
public DBWorkerThread(SensorEnum type, DBService dbService)
{
this.sensorType = type;
this.dbService = dbService;
this.sensorData = new LinkedBlockingQueue<float[]>();
}
/**
* Add data to queue
* @param values
*/
public void addDataToProcess(float[] values)
{
if (sensorData.size() < sensorData.remainingCapacity())
{
try
{
this.sensorData.put(values);
}
catch (Exception ex)
{
LogService.log("Error adding queue: " + ex.getMessage());
}
LogService.log("Added to queue. Size: " + sensorData.size());
}
}
/**
* Processes queue of data
*/
@Override
public void run()
{
// Moves the current Thread into the background
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_BACKGROUND);
while (sensorData.size() > 0)
{
try
{
float[] values = sensorData.take();
storeData(values);
}
catch (Exception ex)
{
LogService.log("Error in queue: " + ex.getMessage());
}
}
}
/**
* Store data to database
* @param values
*/
private void storeData(float[] values)
{
// store data
}
}
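For completeness, a rough sketch of how this worker could be fed from onSensorChanged. SensorEnum.ACCELEROMETER and dbService are assumed to come from the answer's own project, and note that run() as written returns once the queue drains, so the thread has to be (re)started or kept alive with a blocking take().
// illustrative wiring only
private DBWorkerThread accelWorker;

@Override
protected void onResume() {
    super.onResume();
    accelWorker = new DBWorkerThread(SensorEnum.ACCELEROMETER, dbService); // assumed types from the answer
    new Thread(accelWorker).start();
    // register the sensor listener as usual...
}

@Override
public void onSensorChanged(SensorEvent event) {
    if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
        accelWorker.addDataToProcess(event.values.clone()); // hand off a copy and return immediately
    }
}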
Hope this helps.
Related
I have a problem with a simple Android application. It has a SurfaceView with simple drawing, but the activity orientation sometimes does not seem to change after a screen rotation.
This is how the activity looks in portrait mode:
Landscape mode:
But sometimes, when I rotate the screen, the activity looks like this in portrait mode:
MainActivity.java:
package com.example.myapplication;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback
{
private Thread mGameThread;
private GameApp mGameApp;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
SurfaceView mainView = (SurfaceView)findViewById(R.id.mainView);
SurfaceHolder holder = mainView.getHolder();
holder.addCallback(this);
mGameApp = new GameApp(getResources(), holder);
}
@Override
public void surfaceCreated(SurfaceHolder holder)
{
mGameThread = new Thread(new Runnable()
{
@Override
public void run()
{
mGameApp.run();
}
});
mGameThread.start();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
{
mGameApp.setSurfaceSize(width, height);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder)
{
boolean retry = true;
mGameApp.setRunning(false);
while (retry)
{
try
{
mGameThread.join();
retry = false;
}
catch (InterruptedException e)
{
}
}
}
}
GameApp.java:
package com.example.myapplication;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.view.SurfaceHolder;
public class GameApp
{
private Resources mResources;
private SurfaceHolder mSurfaceHolder;
private int mCanvasHeight = 1;
private int mCanvasWidth = 1;
private volatile boolean mRun = false;
public GameApp(Resources resources, SurfaceHolder surfaceHolder)
{
mResources = resources;
mSurfaceHolder = surfaceHolder;
}
public void setSurfaceSize(int width, int height)
{
synchronized (mSurfaceHolder)
{
mCanvasWidth = width;
mCanvasHeight = height;
}
}
public void run()
{
setRunning(true);
while (mRun)
{
Canvas canvas = null;
try
{
canvas = mSurfaceHolder.lockCanvas(null);
synchronized (mSurfaceHolder)
{
if (mRun && canvas != null)
{
draw(canvas);
}
}
}
finally
{
if (canvas != null)
{
mSurfaceHolder.unlockCanvasAndPost(canvas);
}
}
}
}
public void setRunning(boolean b)
{
mRun = b;
}
private void draw(Canvas canvas)
{
canvas.drawColor(Color.GREEN);
Drawable cellImage = mResources.getDrawable(R.drawable.cell);
final float cellWidth = mCanvasWidth / 6;
final float cellHeight = mCanvasHeight / 6;
for (int i = 0; i < 6; i++)
{
for (int j = 0; j < 6; j++)
{
float x = i * cellWidth;
float y = j * cellHeight;
cellImage.setBounds(Math.round(x), Math.round(y), Math.round(x + cellWidth), Math.round(y + cellHeight));
cellImage.draw(canvas);
}
}
}
}
Eventual answer:
The problem is that your GameApp's thread runs a while loop that locks the canvas and then unlocks it without any blocking or sleep in between.
The SurfaceHolder#lockCanvas documentation states:
If null is not returned, this function internally holds a lock until the corresponding unlockCanvasAndPost(Canvas) call, preventing SurfaceView from creating, destroying, or modifying the surface while it is being drawn.
So this means that the destroy code, which runs on the main thread, needs to run between unlockCanvasAndPost and the next lockCanvas. But since you have no sleep or even any other code in between (except for the while condition check), the chance of this happening is very small and, depending on the device, could practically take forever.
To fix this, put a sleep in your game loop to achieve the wanted frame rate; in its most simple form this could look like this:
class GameApp
...
while (mRun)
{
Canvas canvas = null;
try
{
canvas = mSurfaceHolder.lockCanvas(null);
synchronized (mSurfaceHolder)
{
if (mRun && canvas != null)
{
draw(canvas);
}
}
}
finally
{
if (canvas != null)
{
mSurfaceHolder.unlockCanvasAndPost(canvas);
}
}
// Add some sleep time depending on how fast you want to refresh
try {
Thread.sleep(1000/60); //~60 fps
} catch (InterruptedException e) {
e.printStackTrace();
}
}
Original answer 1: What could have helped with handling orientation changes
Seems like the surface is not always re-drawn on a config change.
Try overriding the onConfigurationChanged method of the activity and then triggering a re-layout of the surface
in MainActivity.java:
...
@Override
public void onConfigurationChanged(Configuration newConfig) {
// You should save (SurfaceView)findViewById(R.id.mainView) in a field for better performance, but I'm putting it here for shorter code.
SurfaceView mainView = (SurfaceView)findViewById(R.id.mainView);
mainView.invalidate();
mainView.requestLayout();
}
...
More info on these methods nicely explained here:
Usage of forceLayout(), requestLayout() and invalidate()
Original answer 2: A way to check if your threads are blocked
Another possibility is that you have a thread lock issue on your main thread.
For checking that you could change your activity like this:
public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback {
private Thread mGameThread;
private GameApp mGameApp;
private static View currMainView;
private static Thread logPosterThread = new Thread(new Runnable() {
volatile boolean mainLogDone = true;
@Override
public void run() {
Runnable mainThreadLogger = new Runnable() {
@Override
public void run() {
Log.d("THREAD_CHECK", "Main Thread is ok");
mainLogDone = true;
}
};
while (true) {
try {
int sleepTime = 1000;
Thread.sleep(sleepTime);
if (currMainView != null) {
if (mainLogDone) {
mainLogDone = false;
Log.d("THREAD_CHECK", "Main Thread doing post");
currMainView.post(mainThreadLogger);
} else {
Log.w("THREAD_CHECK", "Main Thread did not run the runnable within " + sleepTime + "ms");
mainLogDone = true;
}
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
});
static {
logPosterThread.start();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
final SurfaceView mainView = (SurfaceView) findViewById(R.id.mainView);
currMainView = mainView;
SurfaceHolder holder = mainView.getHolder();
holder.addCallback(this);
mGameApp = new GameApp(getResources(), holder);
}
@Override
public void onPause() {
...
And then see if those logs still get printed with 1000ms in between when the issue happens.
When you try this, you will see that actually the main thread is hanging when this happens.
I am making an app which consists of an activity and a service. By pressing a button the service is started; it collects data in the background from a sensor, classifies it and outputs a string. I want to display the string in a TextView. Right now I can see in the log that the variable is updated twice every second, but when I try to update the TextView from the service class nothing happens unless I press the button; whenever I press the button, the string is displayed in the TextView.
What is the easiest solution here? I tried making the TextView static and I still can't update it. Can you make it so that the view is updated automatically every second? Can I add a listener somehow? Since I am not very experienced, I would like an easy solution that does not have to be a "good" one.
Here is my code
Activity:
public class CollectorActivity extends Activity {
private enum State {
IDLE, COLLECTING, TRAINING, CLASSIFYING
};
private final String[] mLabels = { Globals.CLASS_LABEL_STANDING,
Globals.CLASS_LABEL_WALKING, Globals.CLASS_LABEL_RUNNING,
Globals.CLASS_LABEL_OTHER };
private RadioGroup radioGroup;
private final RadioButton[] radioBtns = new RadioButton[4];
private Intent mServiceIntent;
private File mFeatureFile;
public static TextView mCurrentLabel;
private State mState;
private Button btnDelete;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
radioGroup = (RadioGroup) findViewById(R.id.radioGroupLabels);
radioBtns[0] = (RadioButton) findViewById(R.id.radioStanding);
radioBtns[1] = (RadioButton) findViewById(R.id.radioWalking);
radioBtns[2] = (RadioButton) findViewById(R.id.radioRunning);
radioBtns[3] = (RadioButton) findViewById(R.id.radioOther);
btnDelete = (Button) findViewById(R.id.btnDeleteData);
mCurrentLabel = (TextView) findViewById(R.id.textView);
mState = State.IDLE;
mFeatureFile = new File(getExternalFilesDir(null),
Globals.FEATURE_FILE_NAME);
mServiceIntent = new Intent(this, SensorsService.class);
}
public void onCollectClicked(View view) {
if (mState == State.IDLE) {
mState = State.COLLECTING;
((Button) view).setText(R.string.ui_collector_button_stop_title);
btnDelete.setEnabled(false);
radioBtns[0].setEnabled(false);
radioBtns[1].setEnabled(false);
radioBtns[2].setEnabled(false);
radioBtns[3].setEnabled(false);
int activityId = radioGroup.indexOfChild(findViewById(radioGroup
.getCheckedRadioButtonId()));
String label = mLabels[activityId];
Bundle extras = new Bundle();
extras.putString(Globals.CLASS_LABEL_KEY, label);
mServiceIntent.putExtras(extras);
startService(mServiceIntent);
} else if (mState == State.COLLECTING) {
mState = State.IDLE;
((Button) view).setText(R.string.ui_collector_button_start_title);
btnDelete.setEnabled(true);
radioBtns[0].setEnabled(true);
radioBtns[1].setEnabled(true);
radioBtns[2].setEnabled(true);
radioBtns[3].setEnabled(true);
stopService(mServiceIntent);
((NotificationManager) getSystemService(NOTIFICATION_SERVICE)).cancelAll();
}
}
public void onDeleteDataClicked(View view) {
if (Environment.MEDIA_MOUNTED.equals(Environment
.getExternalStorageState())) {
if (mFeatureFile.exists()) {
mFeatureFile.delete();
}
Toast.makeText(getApplicationContext(),
R.string.ui_collector_toast_file_deleted,
Toast.LENGTH_SHORT).show();
}
}
@Override
public void onBackPressed() {
if (mState == State.TRAINING) {
return;
} else if (mState == State.COLLECTING || mState == State.CLASSIFYING) {
stopService(mServiceIntent);
((NotificationManager) getSystemService(NOTIFICATION_SERVICE))
.cancel(Globals.NOTIFICATION_ID);
}
super.onBackPressed();
}
@Override
public void onDestroy() {
// Stop the service and the notification.
// Need to check whether the mSensorService is null or not.
if (mState == State.TRAINING) {
return;
} else if (mState == State.COLLECTING || mState == State.CLASSIFYING) {
stopService(mServiceIntent);
((NotificationManager) getSystemService(NOTIFICATION_SERVICE))
.cancelAll();
}
finish();
super.onDestroy();
}
And this is the "doInBackground" method in my service class. The line "CollectorActivity.mCurrentLabel.setText(classification);" is the problem. I want this to update the textView continously.
public class OnSensorChangedTask extends AsyncTask<Void, Void, Void> {
@Override
protected Void doInBackground(Void... arg0) {
Instance inst = new DenseInstance(mFeatLen);
inst.setDataset(mDataset);
Instance inst2 = new DenseInstance(65);
int blockSize = 0;
FFT fft = new FFT(Globals.ACCELEROMETER_BLOCK_CAPACITY);
double[] accBlock = new double[Globals.ACCELEROMETER_BLOCK_CAPACITY];
double[] re = accBlock;
double[] im = new double[Globals.ACCELEROMETER_BLOCK_CAPACITY];
double max = Double.MIN_VALUE;
while (true) {
try {
// need to check if the AsyncTask is cancelled or not in the while loop
if (isCancelled () == true)
{
return null;
}
// Dumping buffer
accBlock[blockSize++] = mAccBuffer.take().doubleValue();
if (blockSize == Globals.ACCELEROMETER_BLOCK_CAPACITY) {
blockSize = 0;
testList = new ArrayList<Double>();
// time = System.currentTimeMillis();
max = .0;
for (double val : accBlock) {
if (max < val) {
max = val;
}
}
fft.fft(re, im);
for (int i = 0; i < re.length; i++) {
double mag = Math.sqrt(re[i] * re[i] + im[i]
* im[i]);
inst.setValue(i, mag);
testList.add(i,mag);
im[i] = .0; // Clear the field
}
// Append max after frequency component
inst.setValue(Globals.ACCELEROMETER_BLOCK_CAPACITY, max);
inst2.setValue(Globals.ACCELEROMETER_BLOCK_CAPACITY, max);
testList.add(max);
classificationIndex = WekaClassifier.classify(testList.toArray());
classification = testLabel.get((int) classificationIndex);
CollectorActivity.mCurrentLabel.setText(classification);
inst.setValue(mClassAttribute, mLabel);
mDataset.add(inst);
Log.i("new instance", mDataset.size() + "");
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
In doInBackground(Void... arg0) change CollectorActivity.mCurrentLabel.setText(classification); to publishProgress(classification); then change the second type argument from Void to String: public class OnSensorChangedTask extends AsyncTask<Void, String, Void> and add onProgressUpdate().
Finally your code should look like:
public class OnSensorChangedTask extends AsyncTask<Void, String, Void> {
@Override
protected Void doInBackground(Void... arg0) {
//...
publishProgress(classification);
//...
}
@Override
protected void onProgressUpdate(String... classification) {
CollectorActivity.mCurrentLabel.setText(classification[0]);
}
}
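As a side note, onProgressUpdate() is delivered on the main (UI) thread, which is why updating the TextView is safe there, while doInBackground() keeps running on a worker thread. A hedged reminder of how the task would be started (the variable name is just illustrative):
OnSensorChangedTask mTask = new OnSensorChangedTask();
mTask.execute(); // execute() itself must be called from the main thread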
I have a static map; I know the center coordinates in latitude and longitude, the zoom level and the image dimensions.
I don't understand whether Google static maps are already flattened and the latitude and longitude per pixel are the same everywhere. If that's not the case, I want to know how I can calculate latitude and longitude per pixel, and then meters per pixel, and how those values vary with latitude.
P.S. I'm developing an Android application that downloads a static map and computes the distance between 2 points, but I've done that assuming the lat and long per pixel are the same ...and don't depend on latitude (position on the globe).
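For reference, Google static maps use the Web Mercator projection, so the ground distance covered by one pixel shrinks with the cosine of the latitude. A minimal sketch of the standard ground-resolution formula (256-pixel base tile, equatorial radius 6378137 m; the method name is just illustrative and not part of the code below):
// meters represented by one pixel at the given latitude and zoom level (for scale=1 images)
static double metersPerPixel(double latitudeDegrees, int zoom) {
    final double EQUATOR_CIRCUMFERENCE = 2 * Math.PI * 6378137; // meters
    return Math.cos(Math.toRadians(latitudeDegrees)) * EQUATOR_CIRCUMFERENCE / (256 * Math.pow(2, zoom));
}
// multiplying the pixel distance between two tapped points by this value gives an approximate distance in meters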
Here is my MainActivity:
package com.mnav.nav;
import android.app.*;
import android.os.*;
import android.view.*;
import android.widget.*;
import android.view.View.*;
import java.net.*;
import org.apache.http.*;
import android.graphics.*;
import java.io.*;
import android.graphics.drawable.*;
public class MainActivity extends Activity
{ Button start, dist;
TextView txt;
EditText lat, longit, zoom;
ImageView img;
Handler mhandler ;
Bitmap bit, original;
Boolean run = false, firstpoint = false, secondpoint = false;
String z, latitude, longitude;
int level;
int[] pointa = new int[2];
int[] pointb = new int[2];
double unit;
int maxx, maxy, minx, miny, distx, disty, distance1;
double distance;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
start=(Button)findViewById(R.id.button);
lat = (EditText)findViewById(R.id.latitude);
longit = (EditText)findViewById(R.id.longitude);
img = (ImageView)findViewById(R.id.img);
zoom =(EditText)findViewById(R.id.zoom);
dist = (Button)findViewById(R.id.dist);
txt = (TextView)findViewById(R.id.txt);
mhandler = new Handler();
start.setOnClickListener(new OnClickListener()
{
@Override
public void onClick(View v)
{ z = zoom.getText().toString();
latitude = lat.getText().toString();
longitude = longit.getText().toString();
Thread thr = new Thread(new GetImage());
thr.start();
}
});
img.setOnTouchListener(new OnTouchListener()
{
@Override
public boolean onTouch(View v, MotionEvent event)
{ int i, j;
int x = (int)event.getX();
int vertical = (int)event.getY();
if(secondpoint==true)
{ bit = original.copy(Bitmap.Config.ARGB_4444, true);
img.setImageBitmap(bit);
secondpoint = false;
}
txt.setText(Integer.toString(x)+ " "+Integer.toString(vertical));
for (i = x-2; i < x+3; i++)
{
for(j=vertical-2; j<vertical+3; j++)
{
bit.setPixel(i,j, Color.WHITE);
}
}
bit.setPixel(x,vertical,Color.GREEN);
img.setImageBitmap(bit);
if(firstpoint)
{
pointb[0] = x; pointb[1]=vertical;
secondpoint = true;
firstpoint = false;
}
else
{
pointa[0]=x; pointa[1]=vertical;
firstpoint=true;
}
return false;
}
});
dist.setOnClickListener(new OnClickListener()
{
@Override
public void onClick(View p1)
{
unit = 0.1412/800;
unit = unit*500;
level = Integer.parseInt(z);
if(level == 20)
{
unit = unit;
}
else
if(level == 19)
{
unit = unit*2;
}
else
if(level==18)
{
unit = unit*4;
}
else
if(level == 17)
{
unit =unit*8;
}
else
if(level==16)
{
unit = unit*16;
}
else
if(level==15)
{
unit = unit*32;
}
else
if(level==14)
{
unit = unit*64;
}
else
if(level==13)
{
unit = unit*128;
}
else
if(level == 12)
{
unit = unit*256;
}
else
if(level==11)
{
unit = unit*512;
}
else
if(level==10)
{
unit = unit*1024;
}
distx = Math.abs(pointa[0]-pointb[0]);
disty= Math.abs(pointa[1]-pointb[1]);
distance1 = distx*distx+disty*disty;
distance = Math.sqrt(distance1);
distance = unit*distance;
txt.setText(Double.toString(distance)+" meters");
}
});
}
public class GetImage implements Runnable
{
String Url = "http://maps.google.com/staticmap?center=" + latitude+","+longitude+"&format=png&zoom="+ z +"&size=500x500&scale=1&maptype=hybrid&key=ABQIAAAA-O3c-Om9OcvXMOJXreXHAxRexG7zW5nSjltmIc1ZE-b8yotBWhQYQEU3J87QIBc4nfuySpoW_K6woA";
Bitmap bmp;
@Override
public void run()
{
try
{ mhandler.post(new Runnable()
{
@Override
public void run()
{
txt.setText("Wait...file is downloading...");
}
});
bmp = BitmapFactory.decodeStream((InputStream)new URL(Url).getContent());
}
catch (IOException e)
{ mhandler.post(new Runnable()
{
@Override
public void run()
{
// TODO: Implement this method
Toast.makeText(getApplicationContext(), "Something wrong just happend", Toast.LENGTH_LONG).show();
}
});
e.printStackTrace();
}
mhandler.post(new Runnable()
{
@Override
public void run()
{
if (bmp!=null)
{ txt.setText("Map downloaded..");
img.setImageBitmap(bmp);
bit = ((BitmapDrawable)img.getDrawable()).getBitmap();
bit = bit.copy(Bitmap.Config.ARGB_4444, true);
original = bit.copy(Bitmap.Config.ARGB_4444, true);
}
else
{
mhandler.post(new Runnable()
{
@Override
public void run()
{
Toast.makeText(getApplicationContext(), "null bitmap, sorry", Toast.LENGTH_LONG).show();
}
});
}
}
});
}
}
}
Using AudioRecord, I have attempted to write a test app to record a couple of seconds of audio to be displayed to the screen. However, I seem to get a repeating pattern of zero value regions as shown below. I'm not sure if this is normal behaviour or an error in my code.
MainActivity.java
public class MainActivity extends Activity implements OnClickListener
{
private static final int SAMPLE_RATE = 44100;
private Button recordButton, playButton;
private String filePath;
private boolean recording;
private AudioRecord record;
private short[] data;
private TestView testView;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Button recordButton = (Button) this.findViewById(R.id.recordButton);
recordButton.setOnClickListener(this);
Button playButton = (Button)findViewById(R.id.playButton);
playButton.setOnClickListener(this);
FrameLayout frame = (FrameLayout)findViewById(R.id.myFrame);
frame.addView(testView = new TestView(this));
}
@Override
public void onClick(View v)
{
if(v.getId() == R.id.recordButton)
{
if(!recording)
{
int bufferSize = AudioRecord.getMinBufferSize( SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
record = new AudioRecord( MediaRecorder.AudioSource.MIC,
SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
bufferSize * 2);
data = new short[10 * SAMPLE_RATE]; // Records up to 10 seconds
new Thread()
{
@Override
public void run()
{
recordAudio();
}
}.start();
recording = true;
Toast.makeText(this, "recording...", Toast.LENGTH_SHORT).show();
}
else
{
recording = false;
Toast.makeText(this, "finished", Toast.LENGTH_SHORT).show();
}
}
else if(v.getId() == R.id.playButton)
{
testView.invalidate();
Toast.makeText(this, "play/pause", Toast.LENGTH_SHORT).show();
}
}
void recordAudio()
{
record.startRecording();
int index = 0;
while(recording)
{
try {
Thread.sleep(50);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
int result = record.read(data, index, SAMPLE_RATE); // read 1 second at a time
if(result == AudioRecord.ERROR_INVALID_OPERATION || result == AudioRecord.ERROR_BAD_VALUE)
{
App.d("SOME SORT OF RECORDING ERROR MATE");
return;
}
else
{
index += result; // increment by number of shorts read
App.d("read: "+result);
}
}
record.stop();
data = Arrays.copyOf(data, index);
testView.setData(data);
}
@Override
protected void onPause()
{
super.onPause();
}
}
TestView.java
public class TestView extends View
{
private short[] data;
Paint paint = new Paint();
Path path = new Path();
float min, max;
public TestView(Context context)
{
super(context);
paint.setColor(Color.BLACK);
paint.setStrokeWidth(1);
paint.setStyle(Style.FILL_AND_STROKE);
}
void setData(short[] data)
{
min = Short.MAX_VALUE;
max = Short.MIN_VALUE;
this.data = data;
for(int i = 0; i < data.length; i++)
{
if(data[i] < min)
min = data[i];
if(data[i] > max)
max = data[i];
}
}
@Override
protected void onDraw(Canvas canvas)
{
canvas.drawRGB(255, 255, 255);
if(data != null)
{
float interval = (float)this.getWidth()/data.length;
for(int i = 0; i < data.length; i+=10)
canvas.drawCircle(i*interval,(data[i]-min)/(max - min)*this.getHeight(),5 ,paint);
}
super.onDraw(canvas);
}
}
Your navigation bar icons make it look like you are probably running on Android 5, and there is a bug in the Android 5.0 release which can cause precisely the problem you are seeing.
Recording to shorts gave an erroneous return value on the L preview, and while substantially reworking the code to fix that, they mistakenly doubled the offset argument in the 5.0 release. Your code increments the index by the (correct) amount it has read in each call, but a pointer-math mistake in the audio internals will double the offset you pass, meaning that each period of recording ends up followed by an equal period of unwritten-to buffer, which you see as those gaps of zeroes.
The issue was reported at http://code.google.com/p/android/issues/detail?id=80866
A patch submitted at that time last fall was declined, as they said they had already dealt with it internally. Looking at the git history for AOSP 5.1, that would appear to have been internal commit 283a9d9e1 of November 13, which was not yet public when I encountered it later that month. While I haven't tried this on 5.1 yet, it seems like that should fix it, so most likely it is broken from 5.0-5.0.2 (and in a different way on the L preview) but works correctly with 4.4 and earlier, as well as with 5.1 and later.
The simplest workaround for consistent behavior across broken and unbroken release versions is to avoid ever passing a non-zero offset when recording shorts - that's how I fixed the program where I encountered the problem. A more complicated idea would be to try to figure out if you are on a broken version, and if so halve the passed argument. One method would be to detect the device version, but it's conceivable some vendor or custom ROM 5.0 builds might have been patched, so you could go a step further and do a short recording with a test offset to a zeroed buffer, then scan it to see where the non-zero data actually starts.
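A hedged sketch of that last idea, probing whether the running build doubles the offset, could look like the following. It is only a heuristic, since genuine silence is also all zeros; the method names are illustrative and the AudioRecord is assumed to be already initialized.
// returns true if read(short[], offset, size) appears to write the data at double the requested offset
private static boolean offsetLooksDoubled(AudioRecord record) {
    final int probe = 256;                      // test offset, in shorts
    short[] buf = new short[probe * 4];         // zero-initialized by default
    record.startRecording();
    int read = record.read(buf, probe, probe);  // request `probe` shorts at offset `probe`
    record.stop();
    if (read <= 0) return false;                // could not tell; assume the build is fine
    boolean atRequested = hasNonZero(buf, probe, probe);
    boolean atDoubled = hasNonZero(buf, 2 * probe, probe);
    return atDoubled && !atRequested;           // data landed at 2*offset instead of offset
}

private static boolean hasNonZero(short[] a, int from, int len) {
    for (int i = from; i < from + len; i++) {
        if (a[i] != 0) return true;
    }
    return false;
}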
Do not pass half the offset to the read function as suggested in the accepted answer. The offset is an integer and might be an odd number. This would result in poor audio quality and would be incompatible with Android versions other than 5.0.1 and 5.0.2. I used the following workaround, which works for all Android versions. I changed:
short[] buffer = new short[frame_size*(frame_rate)];
num = record.read(buffer, offset, frame_size);
into
short[] buffer = new short[frame_size*(frame_rate)];
short[] buffer_bugfix = new short[frame_size];
num = record.read(buffer_bugfix, 0, frame_size);
System.arraycopy(buffer_bugfix, 0, buffer, offset, frame_size);
In words: instead of letting the read function copy the data to the offset position of the large buffer, I let it copy the data into the smaller buffer, and then insert this data manually at the offset position of the large buffer.
I can't check your code right now, but I can provide you with some sample code you can test:
private static int channel_config = AudioFormat.CHANNEL_IN_MONO;
private static int format = AudioFormat.ENCODING_PCM_16BIT;
private static int Fs = 16000;
private static int minBufferSize;
private boolean isRecording;
private boolean isProcessing;
private boolean isNewAudioFragment;
private final static int bytesPerSample = 2; // As it is 16bit PCM
private final double amplification = 1.0; // choose a number as you like
private static int frameLength = 512; // number of samples per frame => 32 ms @ Fs = 16 kHz
private static int windowLength = 16; // number of frames per window => 512 ms @ Fs = 16 kHz
private static int maxBufferedWindows = 8; // number of buffered windows => 4096 ms @ Fs = 16 kHz
private static int bufferSize = frameLength*bytesPerSample;
private static double[] hannWindow = new double[frameLength*bytesPerSample];
private Queue<byte[]> queue = new LinkedList<byte[]>();
private Semaphore semaphoreProcess = new Semaphore(0, true);
private RecordSignal recordSignalThread;
private ProcessSignal processSignalThread;
public static class RecorderSingleton {
public static RecorderSingleton instance = new RecorderSingleton();
private AudioRecord recordInstance = null;
private RecorderSingleton() {
minBufferSize = AudioRecord.getMinBufferSize(Fs, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
while(minBufferSize>bufferSize) {
bufferSize = bufferSize*2;
}
}
public boolean init() {
recordInstance = new AudioRecord(MediaRecorder.AudioSource.MIC, Fs, channel_config, format, bufferSize);
if (recordInstance.getState() != AudioRecord.STATE_INITIALIZED) {
Log.d("audiotestActivity", "Fail to initialize AudioRecord object");
Log.d("audiotestActivity", "AudioRecord.getState()=" + recordInstance.getState());
}
if (recordInstance.getState() == AudioRecord.STATE_UNINITIALIZED) {
return false;
}
return true;
}
public int getBufferSize() {return bufferSize;}
public boolean start() {
if (recordInstance != null && recordInstance.getState() != AudioRecord.STATE_UNINITIALIZED) {
if (recordInstance.getRecordingState() != AudioRecord.RECORDSTATE_STOPPED) {
recordInstance.stop();
}
recordInstance.release();
}
if (!init()) {
return false;
}
recordInstance.startRecording();
return true;
}
public int read(byte[] audioBuffer) {
if (recordInstance == null) {
return AudioRecord.ERROR_INVALID_OPERATION;
}
int ret = recordInstance.read(audioBuffer, 0, bufferSize);
return ret;
}
public void stop() {
if (recordInstance == null) {
return;
}
if(recordInstance.getState()==AudioRecord.STATE_UNINITIALIZED) {
Log.d("AudioTest", "instance uninitialized");
return;
}
if(recordInstance.getState()==AudioRecord.STATE_INITIALIZED) {
recordInstance.stop();
recordInstance.release();
}
}
}
public class RecordSignal implements Runnable {
private boolean cancelled = false;
public void run() {
Looper.prepare();
// We're important...android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
int bufferRead = 0;
byte[] inAudioBuffer;
if (!RecorderSingleton.instance.start()) {
return;
}
try {
Log.d("audiotestActivity", "Recorder Started");
while(isRecording) {
inAudioBuffer = null;
inAudioBuffer = new byte[bufferSize];
bufferRead = RecorderSingleton.instance.read(inAudioBuffer);
if (bufferRead == AudioRecord.ERROR_INVALID_OPERATION) {
throw new IllegalStateException("read() returned AudioRecord.ERROR_INVALID_OPERATION");
} else if (bufferRead == AudioRecord.ERROR_BAD_VALUE) {
throw new IllegalStateException("read() returned AudioRecord.ERROR_BAD_VALUE");
}
queue.add(inAudioBuffer);
semaphoreProcess.release();
}
}
finally {
// Close resources...
stop();
}
Looper.loop();
}
public void stop() {
RecorderSingleton.instance.stop();
}
public void cancel() {
setCancelled(true);
}
public boolean isCancelled() {
return cancelled;
}
public void setCancelled(boolean cancelled) {
this.cancelled = cancelled;
}
}
public class ProcessSignal implements Runnable {
public void run() {
Looper.prepare();
//android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_DEFAULT);
while(isProcessing) {
try {
semaphoreProcess.acquire();
byte[] outAudioBuffer = new byte[frameLength*bytesPerSample*(bufferSize/(frameLength*bytesPerSample))];
outAudioBuffer = queue.element();
if(queue.size()>0) {
// do something, process your samples
}
queue.poll();
}
catch (InterruptedException e) {
e.printStackTrace();
}
}
Looper.loop();
}
}
and to start and stop simply:
public void startAudioTest() {
if(recordSignalThread!=null) {
recordSignalThread.stop();
recordSignalThread.cancel();
recordSignalThread = null;
}
if(processSignalThread!=null) {
processSignalThread = null;
}
recordSignalThread = new RecordSignal();
processSignalThread = new ProcessSignal();
new Thread(recordSignalThread).start();
new Thread(processSignalThread).start();
isRecording = true;
isProcessing = true;
}
public void stopAudioTest() {
isRecording = false;
isProcessing = false;
if(processSignalThread!=null) {
processSignalThread = null;
}
if(recordSignalThread!=null) {
recordSignalThread.cancel();
recordSignalThread = null;
}
}
I want to use the accelerometer sensor to keep track of when a user makes sudden moves.
This service should be started via an activity and keep running indefinitely even if the application is terminated (exits).
Currently everything works fine while the app is alive; once the app is closed, I can still see the service running, but I don't get any signals from it anymore.
Can someone help me keep the service alive and still receiving signals?
Please look at the code I have.
MainActivity.java
public class MainActivity extends ActionBarActivity implements CordovaInterface {
private boolean mAlternateTitle = false;
private boolean bound;
private boolean volumeupBound;
private boolean volumedownBound;
String TAG = "MainActivity-ActionBarTest";
private IPlugin activityResultCallback;
private Object activityResultKeepRunning;
private Object keepRunning;
CordovaWebView mainView;
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
//startService(new Intent(getBaseContext(), FirstService.class));
startService(new Intent(getApplicationContext(), MainAccelerometer.class));
mainView = (CordovaWebView) findViewById(R.id.mainView);
mainView.loadUrl("file:///android_asset/www/index.html");
}
MainAccelerometer.java
public class MainAccelerometer extends Service implements AccelerometerListener{
public int onStartCommand(Intent intent, int flags, int startId) {
return START_NOT_STICKY;
}
public IBinder onBind(Intent arg0)
{
return null;
}
public void onCreate() {
super.onCreate();
//Check whether the device supports the accelerometer sensor or not
if (AccelerometerManager.isSupported(getApplicationContext())) {
//Start Accelerometer Listening
AccelerometerManager.startListening(this);
}
}
public void onAccelerationChanged(float x, float y, float z) {
// TODO Auto-generated method stub
}
public void onShake(float force) {
// Called when Motion Detected
//Toast.makeText(getBaseContext(), "Motion detected", Toast.LENGTH_SHORT).show();
Log.d("Test", "shake");
}
@Override
public void onDestroy() {
super.onDestroy();
Log.i("Sensor", "Service distroy");
//Check whether the Accelerometer listener is still registered
if (AccelerometerManager.isListening()) {
//Stop Accelerometer Listening
AccelerometerManager.stopListening();
//Toast.makeText(getBaseContext(), "onDestroy Accelerometer Stoped", Toast.LENGTH_LONG).show();
}
}
}
AccelerometerManager.java
public class AccelerometerManager {
private static Context aContext=null;
/** Accuracy configuration */
private static float threshold = 20.0f;
private static int interval = 2000;
private static Sensor sensor;
private static SensorManager sensorManager;
// you could use an OrientationListener array instead
// if you plans to use more than one listener
private static AccelerometerListener listener;
/** indicates whether or not Accelerometer Sensor is supported */
private static Boolean supported;
/** indicates whether or not Accelerometer Sensor is running */
private static boolean running = false;
/**
* Returns true if the manager is listening to orientation changes
*/
public static boolean isListening() {
return running;
}
/**
* Unregisters listeners
*/
public static void stopListening() {
running = false;
try {
if (sensorManager != null && sensorEventListener != null) {
sensorManager.unregisterListener(sensorEventListener);
}
} catch (Exception e) {}
}
/**
* Returns true if at least one Accelerometer sensor is available
*/
public static boolean isSupported(Context context) {
aContext = context;
if (supported == null) {
if (aContext != null) {
sensorManager = (SensorManager) aContext.
getSystemService(Context.SENSOR_SERVICE);
// Get all sensors in device
List<Sensor> sensors = sensorManager.getSensorList(
Sensor.TYPE_ACCELEROMETER);
supported = new Boolean(sensors.size() > 0);
} else {
supported = Boolean.FALSE;
}
}
return supported;
}
/**
* Configure the listener for shaking
* @param threshold
* minimum acceleration variation for considering shaking
* @param interval
* minimum interval between two shake events
*/
public static void configure(int threshold, int interval) {
AccelerometerManager.threshold = threshold;
AccelerometerManager.interval = interval;
}
/**
* Registers a listener and start listening
* @param accelerometerListener
* callback for accelerometer events
*/
public static void startListening( AccelerometerListener accelerometerListener )
{
sensorManager = (SensorManager) aContext.
getSystemService(Context.SENSOR_SERVICE);
// Take all sensors in device
List<Sensor> sensors = sensorManager.getSensorList(
Sensor.TYPE_ACCELEROMETER);
if (sensors.size() > 0) {
sensor = sensors.get(0);
// Register Accelerometer Listener
running = sensorManager.registerListener(
sensorEventListener, sensor,
SensorManager.SENSOR_DELAY_GAME);
listener = accelerometerListener;
}
}
/**
* Configures threshold and interval
* And registers a listener and start listening
* @param accelerometerListener
* callback for accelerometer events
* @param threshold
* minimum acceleration variation for considering shaking
* @param interval
* minimum interval between two shake events
*/
public static void startListening(
AccelerometerListener accelerometerListener,
int threshold, int interval) {
configure(threshold, interval);
startListening(accelerometerListener);
}
/**
* The listener that listen to events from the accelerometer listener
*/
private static SensorEventListener sensorEventListener =
new SensorEventListener() {
private long now = 0;
private long timeDiff = 0;
private long lastUpdate = 0;
private long lastShake = 0;
private float x = 0;
private float y = 0;
private float z = 0;
private float lastX = 0;
private float lastY = 0;
private float lastZ = 0;
private float force = 0;
public void onAccuracyChanged(Sensor sensor, int accuracy) {}
public void onSensorChanged(SensorEvent event) {
// use the event timestamp as reference
// so the manager precision won't depend
// on the AccelerometerListener implementation
// processing time
now = event.timestamp;
x = event.values[0];
y = event.values[1];
z = event.values[2];
// if not interested in shake events
// just remove the whole if then else block
if (lastUpdate == 0) {
lastUpdate = now;
lastShake = now;
lastX = x;
lastY = y;
lastZ = z;
Toast.makeText(aContext,"No Motion detected", Toast.LENGTH_SHORT).show();
} else {
timeDiff = now - lastUpdate;
if (timeDiff > 0) {
/*force = Math.abs(x + y + z - lastX - lastY - lastZ)
/ timeDiff;*/
//force = Math.abs(x + y + z - lastX - lastY - lastZ);
force = Math.abs(x - lastX);
if (Float.compare(force, threshold) >0 ) {
//Toast.makeText(Accelerometer.getContext(), (now-lastShake)+" >= "+interval, 1000).show();
if (now - lastShake >= interval) {
// trigger shake event
listener.onShake(force);
}
else
{
//Toast.makeText(aContext,"No Motion detected", Toast.LENGTH_SHORT).show();
}
lastShake = now;
}
lastX = x;
lastY = y;
lastZ = z;
lastUpdate = now;
}
else
{
//Toast.makeText(aContext,"No Motion detected", Toast.LENGTH_SHORT).show();
}
}
// trigger change event
listener.onAccelerationChanged(x, y, z);
}
};
}
AccelerometerListener.java
public interface AccelerometerListener {
public void onAccelerationChanged(float x, float y, float z);
public void onShake(float force);
}
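One common way to keep a sensor-tracking service like MainAccelerometer alive after the activity goes away is to return START_STICKY from onStartCommand and promote the service to a foreground service with a notification. A minimal sketch, assuming the service above; the notification text and id are placeholders, and API 26+ additionally requires a notification channel:
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
    Notification notification = new Notification.Builder(this)
            .setContentTitle("Motion tracking running")       // placeholder text
            .setSmallIcon(android.R.drawable.ic_dialog_info)  // placeholder icon
            .build();
    startForeground(1, notification); // keeps the process at foreground priority
    return START_STICKY;              // ask the system to recreate the service if it gets killed
}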