I am creating an Android image classification application using TensorFlow. When I try to run CameraActivity.java, the activity crashes. I referred to this link to develop my app: https://github.com/MindorksOpenSource/AndroidTensorFlowMachineLearningExample. I am not sure what is wrong, so may I know how I should solve this issue?
Logcat error: Could not load vendor/lib/egl/libGLES_mali_v2.so from sphal namespace: dlopen failed: library "vendor/lib/egl/libGLES_mali_v2.so" not found.
Debug Error:
FATAL EXCEPTION: pool-1-thread-1
Process: com.example.nicholas.herb_recognition_test, PID: 15136
java.lang.RuntimeException: Error initializing TensorFlow!
at CameraActivity$5.run(CameraActivity.java:122)
Debug Error Line : classifier = TensorFlowImageClassifier.create
Error: java.util.concurrent.ThreadPoolExecutor.runWorker
CameraActivity.java
public class CameraActivity extends AppCompatActivity {
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
cameraView = (CameraView) findViewById(R.id.cameraView);
imageViewResult = (ImageView) findViewById(R.id.imageViewResult);
textViewResult = (TextView) findViewById(R.id.textViewResult);
textViewResult.setMovementMethod(new ScrollingMovementMethod());
btnToggleCamera = (Button) findViewById(R.id.btnToggleCamera);
btnDetectObject = (Button) findViewById(R.id.btnDetectObject);
cameraView.setCameraListener(new CameraListener() {
@Override
public void onPictureTaken(byte[] picture) {
super.onPictureTaken(picture);
Bitmap bitmap = BitmapFactory.decodeByteArray(picture, 0, picture.length);
bitmap = Bitmap.createScaledBitmap(bitmap, INPUT_SIZE, INPUT_SIZE, false);
imageViewResult.setImageBitmap(bitmap);
final List<Classifier.Recognition> results = classifier.recognizeImage(bitmap);
textViewResult.setText(results.toString());
}
});
btnToggleCamera.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
cameraView.toggleFacing();
}
});
btnDetectObject.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
cameraView.captureImage();
}
});
initTensorFlowAndLoadModel();
}
private void initTensorFlowAndLoadModel() {
executor.execute(new Runnable() {
@Override
public void run() {
try {
classifier = TensorFlowImageClassifier.create(
getAssets(),
MODEL_FILE,
LABEL_FILE,
INPUT_SIZE,
IMAGE_MEAN,
IMAGE_STD,
INPUT_NAME,
OUTPUT_NAME);
makeButtonVisible();
} catch (final Exception e) {
throw new RuntimeException("Error initializing TensorFlow!", e);
}
}
});
}
}
TensorFlowImageClassifier.java
public class TensorFlowImageClassifier implements Classifier {
public static Classifier create(
AssetManager assetManager,
String modelFilename,
String labelFilename,
int inputSize,
int imageMean,
float imageStd,
String inputName,
String outputName)
throws IOException {
TensorFlowImageClassifier c = new TensorFlowImageClassifier();
c.inputName = inputName;
c.outputName = outputName;
// Read the label names into memory.
// TODO(andrewharp): make this handle non-assets.
String actualFilename = labelFilename.split("file:///android_asset/")[1];
Log.i(TAG, "Reading labels from: " + actualFilename);
BufferedReader br = null;
br = new BufferedReader(new InputStreamReader(assetManager.open(actualFilename)));
String line;
while ((line = br.readLine()) != null) {
c.labels.add(line);
}
br.close();
c.inferenceInterface = new TensorFlowInferenceInterface();
if (c.inferenceInterface.initializeTensorFlow(assetManager, modelFilename) != 0) {
throw new RuntimeException("TF initialization failed");
}
// The shape of the output is [N, NUM_CLASSES], where N is the batch size.
int numClasses =
(int) c.inferenceInterface.graph().operation(outputName).output(0).shape().size(1);
Log.i(TAG, "Read " + c.labels.size() + " labels, output layer size is " + numClasses);
// Ideally, inputSize could have been retrieved from the shape of the input operation. Alas,
// the placeholder node for input in the graphdef typically used does not specify a shape, so it
// must be passed in as a parameter.
c.inputSize = inputSize;
c.imageMean = imageMean;
c.imageStd = imageStd;
// Pre-allocate buffers.
c.outputNames = new String[]{outputName};
c.intValues = new int[inputSize * inputSize];
c.floatValues = new float[inputSize * inputSize * 3];
c.outputs = new float[numClasses];
return c;
}
@Override
public List<Recognition> recognizeImage(final Bitmap bitmap) {
// Log this method so that it can be analyzed with systrace.
Trace.beginSection("recognizeImage");
Trace.beginSection("preprocessBitmap");
// Preprocess the image data from 0-255 int to normalized float based
// on the provided parameters.
bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
for (int i = 0; i < intValues.length; ++i) {
final int val = intValues[i];
floatValues[i * 3 + 0] = (((val >> 16) & 0xFF) - imageMean) / imageStd;
floatValues[i * 3 + 1] = (((val >> 8) & 0xFF) - imageMean) / imageStd;
floatValues[i * 3 + 2] = ((val & 0xFF) - imageMean) / imageStd;
}
Trace.endSection();
// Copy the input data into TensorFlow.
Trace.beginSection("fillNodeFloat");
inferenceInterface.fillNodeFloat(
inputName, new int[]{1, inputSize, inputSize, 3}, floatValues);
Trace.endSection();
// Run the inference call.
Trace.beginSection("runInference");
inferenceInterface.runInference(outputNames);
Trace.endSection();
// Copy the output Tensor back into the output array.
Trace.beginSection("readNodeFloat");
inferenceInterface.readNodeFloat(outputName, outputs);
Trace.endSection();
// Find the best classifications.
PriorityQueue<Recognition> pq =
new PriorityQueue<Recognition>(
3,
new Comparator<Recognition>() {
@Override
public int compare(Recognition lhs, Recognition rhs) {
// Intentionally reversed to put high confidence at the head of the queue.
return Float.compare(rhs.getConfidence(), lhs.getConfidence());
}
});
for (int i = 0; i < outputs.length; ++i) {
if (outputs[i] > THRESHOLD) {
pq.add(
new Recognition(
"" + i, labels.size() > i ? labels.get(i) : "unknown", outputs[i], null));
}
}
final ArrayList<Recognition> recognitions = new ArrayList<Recognition>();
int recognitionsSize = Math.min(pq.size(), MAX_RESULTS);
for (int i = 0; i < recognitionsSize; ++i) {
recognitions.add(pq.poll());
}
Trace.endSection(); // "recognizeImage"
return recognitions;
}
@Override
public void enableStatLogging(boolean debug) {
inferenceInterface.enableStatLogging(debug);
}
@Override
public String getStatString() {
return inferenceInterface.getStatString();
}
@Override
public void close() {
inferenceInterface.close();
}
}
It seems that there are some compatibility issues between my model file and the model architecture expected by TensorFlowImageClassifier.java.
Just change INPUT_NAME and OUTPUT_NAME to:
private static final String INPUT_NAME = "Mul";
private static final String OUTPUT_NAME = "final_result";
It worked for me
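For context, these names must match the input and output node names of the graph bundled in assets: "Mul" and "final_result" are typical of a retrained Inception-v3 graph, whereas the original sample expects "input"/"output". A minimal sketch of the constants block in CameraActivity, assuming such a retrained graph (the file names, input size, mean and std below are assumptions and have to match how the model was actually exported):
private static final String INPUT_NAME = "Mul";            // input node of the retrained graph
private static final String OUTPUT_NAME = "final_result";  // output node added by the retraining script
private static final int INPUT_SIZE = 299;                 // assumption: Inception v3 input resolution
private static final int IMAGE_MEAN = 128;                 // assumption: normalization used during retraining
private static final float IMAGE_STD = 128;
private static final String MODEL_FILE = "file:///android_asset/retrained_graph.pb";   // assumption
private static final String LABEL_FILE = "file:///android_asset/retrained_labels.txt"; // assumption
private Executor executor = Executors.newSingleThreadExecutor(); // used by initTensorFlowAndLoadModel()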
Related
I have a piece of code that compares two images and places a marker on the difference. So far it works well, except that the latest marker layer that is added always shows underneath all the older markers. I have the latest one in yellow and the older ones in red. When the difference is close to one of the red markers, the yellow marker shows behind them.
Can anyone help me get the yellow (latest) marker to appear on top?
This is my code so far:
public class CheckmarkActivity extends AppCompatActivity implements ZoomLayout.OnZoomableLayoutClickEventListener {
TextView tv;
RelativeLayout relativeLayout_work;
ImageView imageViewtest;
Bitmap prevBmp = null;
Timer t;
TimerTask task;
int time = 100;
float image_Width;
float image_Height;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_checkmark);
if (getResources().getBoolean(R.bool.is_tablet)) {
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
}
tv = findViewById(R.id.tv);
relativeLayout_work = findViewById(R.id.relativeLayout_work);
imageViewtest = findViewById(R.id.imageViewtest);
prevBmp = ViewcontrollerActivity.workSession.getLastScreenShot();
if (prevBmp == null || ViewcontrollerActivity.workSession.workScreenShot == null) {
setResult(Activity.RESULT_CANCELED);
finish();
}
startTimer();
}
// image compare
class TestAsync extends AsyncTask<Object, Integer, String>
{
String TAG = getClass().getSimpleName();
PointF p;
Bitmap test_3;
protected void onPreExecute (){
super.onPreExecute();
Log.d(TAG + " PreExceute","On pre Exceute......");
}
protected String doInBackground(Object...arg0) {
test_3 = ImageHelper.findDifference(CheckmarkActivity.this, prevBmp, ViewcontrollerActivity.workSession.workScreenShot);
p = ImageHelper.findShot(test_3);
time = 1;
return "You are at PostExecute";
}
protected void onProgressUpdate(Integer...a){
super.onProgressUpdate(a);
}
protected void onPostExecute(String result) {
super.onPostExecute(result);
addImageToImageview();
PointF np = Session.convertPointBitmap2View(p, relativeLayout_work, ViewcontrollerActivity.workSession.workScreenShot);
tv.setX(np.x - tv.getWidth() / 2);
tv.setY(np.y - tv.getHeight() / 2);
tv.setVisibility(View.VISIBLE);
// imageViewtest.setImageBitmap(test_3);
}
}
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i("OpenCV", "OpenCV loaded successfully");
new TestAsync().execute();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
@Override
protected void onResume() {
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d("OpenCV", "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d("OpenCV", "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public static int[] getBitmapOffset(ImageView img, Boolean includeLayout) {
int[] offset = new int[2];
float[] values = new float[9];
Matrix m = img.getImageMatrix();
m.getValues(values);
offset[0] = (int) values[5];
offset[1] = (int) values[2];
if (includeLayout) {
ViewGroup.MarginLayoutParams lp = (ViewGroup.MarginLayoutParams) img.getLayoutParams();
int paddingTop = (int) (img.getPaddingTop() );
int paddingLeft = (int) (img.getPaddingLeft() );
offset[0] += paddingTop + lp.topMargin;
offset[1] += paddingLeft + lp.leftMargin;
}
return offset;
}
public static int[] getBitmapPositionInsideImageView(ImageView imageView) {
int[] ret = new int[4];
if (imageView == null || imageView.getDrawable() == null)
return ret;
// Get image dimensions
// Get image matrix values and place them in an array
float[] f = new float[9];
imageView.getImageMatrix().getValues(f);
// Extract the scale values using the constants (if aspect ratio maintained, scaleX == scaleY)
final float scaleX = f[Matrix.MSCALE_X];
final float scaleY = f[Matrix.MSCALE_Y];
// Get the drawable (could also get the bitmap behind the drawable and getWidth/getHeight)
final Drawable d = imageView.getDrawable();
final int origW = d.getIntrinsicWidth();
final int origH = d.getIntrinsicHeight();
// Calculate the actual dimensions
final int actW = Math.round(origW * scaleX);
final int actH = Math.round(origH * scaleY);
ret[2] = actW;
ret[3] = actH;
// Get image position
// We assume that the image is centered into ImageView
int imgViewW = imageView.getWidth();
int imgViewH = imageView.getHeight();
int top = (int) (imgViewH - actH)/2;
int left = (int) (imgViewW - actW)/2;
ret[0] = left;
ret[1] = top;
return ret;
}
private void addImageToImageview(){
if (ViewcontrollerActivity.workSession.workScreenShot != null) {
imageViewtest.setImageBitmap(ViewcontrollerActivity.workSession.workScreenShot);
Log.d("width", String.valueOf(imageViewtest.getWidth()));
}
Resources r = getResources();
float px = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 20, r.getDisplayMetrics());
for (int i = 0; i < ViewcontrollerActivity.workSession.getShotCount(); i++) {
PointF p = ViewcontrollerActivity.workSession.getPoint(i);
TextView t = new TextView(this);
t.setText("" + (i + 1));
RelativeLayout.LayoutParams param = new RelativeLayout.LayoutParams((int)px, (int)px);
relativeLayout_work.addView(t);
t.setLayoutParams(param);
t.setGravity(Gravity.CENTER);
t.setBackgroundResource(R.drawable.circle);
p = Session.convertPointBitmap2View(p, relativeLayout_work, ViewcontrollerActivity.workSession.workScreenShot);
t.setX(p.x);
t.setY(p.y);
t.setTag(10000 + i);
}
}
public void onConfirm(View v){
View vv = findViewById(R.id.relativeLayout_work);
PointF bp = Session.convertPointView2Bitmap(new PointF(tv.getX(), tv.getY()), relativeLayout_work, ViewcontrollerActivity.workSession.workScreenShot);
ViewcontrollerActivity.workSession.addNewShot(ViewcontrollerActivity.workSession.workScreenShot, bp);
setResult(Activity.RESULT_OK);
finish();
}
public void onCancel(View v){
setResult(Activity.RESULT_CANCELED);
finish();
}
@Override
public void onBackPressed() {
setResult(Activity.RESULT_CANCELED);
finish();
}
@Override
public void OnContentClickEvent(int action, float xR, float yR) {
int[] offset = new int[2];
int[] rect = new int[4];
offset = this.getBitmapOffset(imageViewtest, false);
int original_width = imageViewtest.getDrawable().getIntrinsicWidth();
int original_height = imageViewtest.getDrawable().getIntrinsicHeight();
rect = getBitmapPositionInsideImageView(imageViewtest);
Log.i("OffsetY", String.valueOf(offset[0]));
Log.i("OffsetX", String.valueOf(offset[1]));
Log.i( "0", String.valueOf(rect[0]));
Log.i( "1", String.valueOf(rect[1]));
Log.i( "2", String.valueOf(rect[2]));
Log.i( "3", String.valueOf(rect[3]));
if (xR > rect[0] && xR < rect[0] + rect[2] && yR > rect[1] && yR < rect[1] + rect[3]) {
tv.setX(xR - tv.getWidth() / 2);
tv.setY(yR - tv.getHeight() / 2);
}
// tv.setX(xR - tv.getWidth() / 2);
// tv.setY(yR - tv.getHeight() / 2);
}
public void onMoveButtonPressed(View v) {
ImageButton b = (ImageButton)v;
int mId = b.getId();
switch (mId) {
case R.id.imageButtonL:
tv.setX(tv.getX() - 1);
break;
case R.id.imageButtonR:
tv.setX(tv.getX() + 1);
break;
case R.id.imageButtonD:
tv.setY(tv.getY() + 1);
break;
case R.id.imageButtonU:
tv.setY(tv.getY() - 1);
break;
}
}
//timer change image
public void startTimer(){
t = new Timer();
task = new TimerTask() {
@Override
public void run() {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (time == 1){
imageViewtest.setImageBitmap(ViewcontrollerActivity.workSession.workScreenShot);
// tv.setVisibility(View.VISIBLE);
tv.setText("" + (ViewcontrollerActivity.workSession.getShotCount() + 1));
t.cancel();
return;
}
if (time % 2 == 0) {
imageViewtest.setImageBitmap(prevBmp);
}
else if(time % 2 == 1){
imageViewtest.setImageBitmap(ViewcontrollerActivity.workSession.workScreenShot);
}
time --;
}
});
}
};
t.scheduleAtFixedRate(task, 0, 500);
}
}
You can control the z-order of a child view with the addView() overload that takes an index:
void addView(View child, int index)
For example:
private void addImageToImageview(){
if (ViewcontrollerActivity.workSession.workScreenShot != null) {
imageViewtest.setImageBitmap(ViewcontrollerActivity.workSession.workScreenShot);
Log.d("width", String.valueOf(imageViewtest.getWidth()));
}
Resources r = getResources();
float px = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 20, r.getDisplayMetrics());
int currChildrenCount = relativeLayout_work.getChildCount();
for (int i = 0; i < ViewcontrollerActivity.workSession.getShotCount(); i++) {
PointF p = ViewcontrollerActivity.workSession.getPoint(i);
TextView t = new TextView(this);
t.setText("" + (i + 1));
RelativeLayout.LayoutParams param = new RelativeLayout.LayoutParams((int)px, (int)px);
relativeLayout_work.addView(t, currChildrenCount+i); // You can control the order like this
t.setLayoutParams(param);
t.setGravity(Gravity.CENTER);
t.setBackgroundResource(R.drawable.circle);
p = Session.convertPointBitmap2View(p, relativeLayout_work, ViewcontrollerActivity.workSession.workScreenShot);
t.setX(p.x);
t.setY(p.y);
t.setTag(10000 + i);
}
}
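Alternatively, since children added later are drawn on top of earlier ones, raising the latest marker after the red markers have been added should have the same effect. A short sketch, assuming tv is the yellow (latest) marker TextView from the question:
// bring the yellow (latest) marker above the red markers added in addImageToImageview()
tv.bringToFront();
relativeLayout_work.requestLayout();
relativeLayout_work.invalidate();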
In the app I'm getting WiFi List<ScanResult> data and trying to update the chart, but something is not going well and the app throws java.lang.IndexOutOfBoundsException: Invalid index 0, size is 0. I have tried many code combinations from MPAndroidChart but nothing solves the issue...
Why is it happening?
Broadcast Receiver for WiFi ScanResults :
public class Receiver extends BroadcastReceiver {
@SuppressWarnings("ConstantConditions")
@Override
public void onReceive(Context context, Intent intent) {
new Thread(new Runnable() {
@Override
public void run() {
try {
List<ScanResult> results = mWifiManager.getScanResults();
ArrayList<String> lista_bssid_e_skanuar = new ArrayList<>();
ArrayList<String> lista_bssid_lista_skanuar = new ArrayList<>();
//boolean channel_width_supported = false;
for (ScanResult result1 : results) {
final String SSID = result1.SSID;
final String channel = String.valueOf(ieee80211_frequency_to_channel(result1.frequency));
final String frequency = String.valueOf(result1.frequency);
final String BSSID = result1.BSSID;
final String capabilities = result1.capabilities;
final String signal = String.valueOf(result1.level);
String security = "FREE";
int channel_width;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
//channel_width_supported = true;
int freq1 = result1.centerFreq0;
int freq2 = result1.centerFreq1;
int channel_width_freq;
if (freq1 - freq2 < 0) {
channel_width_freq = freq2 - freq1;
} else {
channel_width_freq = freq1 - freq2;
}
if ((Integer.parseInt(frequency) - channel_width_freq) <= 0) {
channel_width = channel_width_freq - Integer.parseInt(frequency);
} else {
channel_width = Integer.parseInt(frequency) - channel_width_freq;
}
if (channel_width != 20 && channel_width != 40 && channel_width == 60 && channel_width != 80 && channel_width != 160) {
channel_width = 21;
}
} else {
//channel_width_supported = false;
channel_width = 21;
}
final int finalChannel_width = channel_width;
if (capabilities.contains("WPA")) {
security = "WPA";
}
if (capabilities.contains("WPA2")) {
security = "WPA2";
}
if (capabilities.contains("WEP")) {
security = "WEP";
}
final String finalSecurity = security;
if (!BSSID.equals("00:00:00:00:00:00")) {
lista_bssid_e_skanuar.add(SSID + "/" + channel + "/" + frequency + "/" + BSSID + "/" + capabilities + "/" + signal + "/" + finalSecurity + "/" + finalChannel_width);
lista_bssid_lista_skanuar.add(BSSID);
}
}
synchronized (_lock) {
for (int a = 0; a < lista_bssid_e_skanuar.size(); a++) {
String[] TE_DHENAT = lista_bssid_e_skanuar.get(a).split("/");
final String SSID_LISTA = TE_DHENAT[0];
final String CHANNEL_LISTA = TE_DHENAT[1];
final String BSSID_LISTA = TE_DHENAT[3];
final String SIGNAL_LISTA = TE_DHENAT[5];
final int kanali = Integer.parseInt(CHANNEL_LISTA);
int signal = Integer.parseInt(SIGNAL_LISTA) + 100;
if (signal > 70) {
signal = 70;
}
if (!lista_bssid_lista.contains(BSSID_LISTA)) {
lista_bssid_lista.add(BSSID_LISTA);
addEntry(kanali - 2, 0, a, SSID_LISTA);
addEntry(kanali, signal, a, SSID_LISTA);
addEntry(kanali + 2, 0, a, SSID_LISTA);
} else {
for (int i = 0; i < lista_bssid_lista.size(); i++) {
if (!lista_bssid_lista_skanuar.contains(lista_bssid_lista.get(i))) {
//MAYBE EXCEPTION HAPPENING HERE ??
final int finalSignal1 = signal;
final int finalI = i;
removeEntry(kanali, finalSignal1, finalI);
addEntry(kanali - 2, 0, finalI, SSID_LISTA);
addEntry(kanali, 0, finalI, SSID_LISTA);
addEntry(kanali + 2, 0, finalI, SSID_LISTA);
} else {
// OR HERE ????
final int finalSignal = signal;
final int finalA = a;
removeEntry(kanali, finalSignal, finalA);
addEntry(kanali - 2, 0, finalA, SSID_LISTA);
addEntry(kanali, finalSignal, finalA, SSID_LISTA);
addEntry(kanali + 2, 0, finalA, SSID_LISTA);
}
}
}
}
}
}catch (Exception i){
Thread.currentThread().interrupt();
}
}
}).start();
}
}
Add Entry Method :
private synchronized void addEntry(float kanali, float sinjali, int indexi, String emri) {
LineData data = mwifichart.getData();
ILineDataSet set = data.getDataSetByIndex(indexi);
if (set == null) {
set = createSet(emri);
data.addDataSet(set);
}
data.addEntry(new Entry(kanali, sinjali), indexi);
data.notifyDataChanged();
mwifichart.notifyDataSetChanged();
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
mwifichart.invalidate();
}
});
}
Remove Entry Method :
private synchronized void removeEntry(float x_value,float y_value, int indexi) {
try {
LineData data = mwifichart.getData();
if (data != null) {
ILineDataSet set = data.getDataSetByIndex(indexi);
if (set != null) {
data.removeEntry(x_value - 2, indexi);
data.removeEntry(x_value + 0, indexi);
data.removeEntry(x_value + 2, indexi);
data.notifyDataChanged();
mwifichart.notifyDataSetChanged();
}
}
}catch (Exception I){ TastyToast.makeText(getContext(),I.getMessage(),TastyToast.LENGTH_SHORT,TastyToast.ERROR);
}
}
I'm getting this exception and the app crashes instantly; I have tried many code combinations but nothing works...
java.lang.IndexOutOfBoundsException: Invalid index 0, size is 0
at java.util.ArrayList.throwIndexOutOfBoundsException(ArrayList.java:260)
at java.util.ArrayList.get(ArrayList.java:313)
at com.github.mikephil.charting.data.DataSet.getEntryForIndex(DataSet.java:286)
at com.github.mikephil.charting.utils.Transformer.generateTransformedValuesLine(Transformer.java:184)
at com.github.mikephil.charting.renderer.LineChartRenderer.drawValues(LineChartRenderer.java:547)
at com.github.mikephil.charting.charts.BarLineChartBase.onDraw(BarLineChartBase.java:263)
UPDATE :
It seems like the exception does not happen if the code runs on the UI thread, but I'm not sure about this; it just doesn't throw the exception anymore.
Example
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
removeEntry(kanali, finalSignal, finalA);
addEntry(kanali - 2, 0, finalA, SSID_LISTA);
addEntry(kanali, finalSignal, finalA, SSID_LISTA);
addEntry(kanali + 2, 0, finalA, SSID_LISTA);
}
});
The issue is tracked in the GitHub project here:
https://github.com/PhilJay/MPAndroidChart/issues/2450
It seems to be happening only for line charts.
The possible solution seems to be to clear the chart if there are no values to display:
if (values.isEmpty()) {
chart.clear();
} else {
// set data
chart.setData(data);
}
I have the same problem and will deploy this fix soon; I will update if this fix does not work.
I had the same problem, but I just clear the chart before putting in any data.
PieChart pieChart = view.findViewById(R.id.PieChart);
pieChart.clear();
You can create a simple custom ChartRenderer that avoids drawing values while the chart is empty.
Then add it to your chart:
chart.setRenderer(new CustomLineChartRenderer(chart,chart.getAnimator(),chart.getViewPortHandler()));
public class CustomLineChartRenderer extends LineChartRenderer {
public CustomLineChartRenderer(LineDataProvider chart, ChartAnimator animator, ViewPortHandler viewPortHandler) {
super(chart, animator, viewPortHandler);
}
@Override
protected boolean shouldDrawValues(IDataSet set) {
return super.shouldDrawValues(set) && set.getEntryCount() > 0;
}
}
In your addEntry method, mwifichart.getData() is returning null, and you are trying to read a data set from it. Put a guard on that statement, for example:
LineData data = mwifichart.getData();
if (data != null && data.getDataSetCount() > indexi) {
    ILineDataSet set = data.getDataSetByIndex(indexi);
    // ...
}
Using AudioRecord, I have attempted to write a test app to record a couple of seconds of audio to be displayed to the screen. However, I seem to get a repeating pattern of zero value regions as shown below. I'm not sure if this is normal behaviour or an error in my code.
MainActivity.java
public class MainActivity extends Activity implements OnClickListener
{
private static final int SAMPLE_RATE = 44100;
private Button recordButton, playButton;
private String filePath;
private boolean recording;
private AudioRecord record;
private short[] data;
private TestView testView;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Button recordButton = (Button) this.findViewById(R.id.recordButton);
recordButton.setOnClickListener(this);
Button playButton = (Button)findViewById(R.id.playButton);
playButton.setOnClickListener(this);
FrameLayout frame = (FrameLayout)findViewById(R.id.myFrame);
frame.addView(testView = new TestView(this));
}
@Override
public void onClick(View v)
{
if(v.getId() == R.id.recordButton)
{
if(!recording)
{
int bufferSize = AudioRecord.getMinBufferSize( SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
record = new AudioRecord( MediaRecorder.AudioSource.MIC,
SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
bufferSize * 2);
data = new short[10 * SAMPLE_RATE]; // Records up to 10 seconds
new Thread()
{
@Override
public void run()
{
recordAudio();
}
}.start();
recording = true;
Toast.makeText(this, "recording...", Toast.LENGTH_SHORT).show();
}
else
{
recording = false;
Toast.makeText(this, "finished", Toast.LENGTH_SHORT).show();
}
}
else if(v.getId() == R.id.playButton)
{
testView.invalidate();
Toast.makeText(this, "play/pause", Toast.LENGTH_SHORT).show();
}
}
void recordAudio()
{
record.startRecording();
int index = 0;
while(recording)
{
try {
Thread.sleep(50);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
int result = record.read(data, index, SAMPLE_RATE); // read 1 second at a time
if(result == AudioRecord.ERROR_INVALID_OPERATION || result == AudioRecord.ERROR_BAD_VALUE)
{
App.d("SOME SORT OF RECORDING ERROR MATE");
return;
}
else
{
index += result; // increment by number of bytes read
App.d("read: "+result);
}
}
record.stop();
data = Arrays.copyOf(data, index);
testView.setData(data);
}
@Override
protected void onPause()
{
super.onPause();
}
}
TestView.java
public class TestView extends View
{
private short[] data;
Paint paint = new Paint();
Path path = new Path();
float min, max;
public TestView(Context context)
{
super(context);
paint.setColor(Color.BLACK);
paint.setStrokeWidth(1);
paint.setStyle(Style.FILL_AND_STROKE);
}
void setData(short[] data)
{
min = Short.MAX_VALUE;
max = Short.MIN_VALUE;
this.data = data;
for(int i = 0; i < data.length; i++)
{
if(data[i] < min)
min = data[i];
if(data[i] > max)
max = data[i];
}
}
@Override
protected void onDraw(Canvas canvas)
{
canvas.drawRGB(255, 255, 255);
if(data != null)
{
float interval = (float)this.getWidth()/data.length;
for(int i = 0; i < data.length; i+=10)
canvas.drawCircle(i*interval,(data[i]-min)/(max - min)*this.getHeight(),5 ,paint);
}
super.onDraw(canvas);
}
}
Your navigation bar icons make it look like you are probably running on Android 5, and there is a bug in the Android 5.0 release which can cause precisely the problem you are seeing.
Recording to shorts gave an erroneous return value on the L preview, and while substantially reworking the code in the course of fixing that they mistakenly doubled the offset argument in the 5.0 release. Your code increments the index by the (correct) amount it has read in each call, but a pointer math mistake in the audio internals will double the offset you pass, meaning that each period of recording ends up followed by an equal period of unwritten-to buffer, which you see as those gaps of zeroes.
The issue was reported at http://code.google.com/p/android/issues/detail?id=80866
A patch submitted at that time last fall was declined as they said they had already dealt with it internally. Looking at the git history for AOSP 5.1, that would appear to have been internal commit 283a9d9e1 of November 13, which was not yet public when I encountered it later that month. While I haven't tried this on 5.1 yet, it seems like that should fix it, so most likely it is broken from 5.0-5.02 (and in a different way on the L preview) but works correctly with 4.4 and earlier, as well as with 5.1 and later.
The simplest workaround for consistent behavior across broken and unbroken release versions is to avoid ever passing a non-zero offset when recording shorts - that's how I fixed the program where I encountered the problem. A more complicated idea would be to try to figure out if you are on a broken version, and if so halve the passed argument. One method would be to detect the device version, but it's conceivable some vendor or custom ROM 5.0 builds might have been patched, so you could go a step further and do a short recording with a test offset to a zeroed buffer, then scan it to see where the non-zero data actually starts.
Do not pass half the offset to the read function as suggested in the accepted answer. The offset is an integer and might be an odd number. This will result in poor audio quality and would be incompatible with Android versions other than 5.0.1 and 5.0.2. I used the following workaround, which works for all Android versions. I changed:
short[] buffer = new short[frame_size*(frame_rate)];
num = record.read(buffer, offset, frame_size);
into
short[] buffer = new short[frame_size*(frame_rate)];
short[] buffer_bugfix = new short[frame_size];
num = record.read(buffer_bugfix, 0, frame_size);
System.arraycopy(buffer_bugfix, 0, buffer, offset, frame_size);
In words: instead of letting the read function copy the data to the offset position of the large buffer, I let it copy the data into the smaller buffer, and then insert that data manually at the offset position of the large buffer.
I can't check your code right now, but I can provide some sample code you can test:
private static int channel_config = AudioFormat.CHANNEL_IN_MONO;
private static int format = AudioFormat.ENCODING_PCM_16BIT;
private static int Fs = 16000;
private static int minBufferSize;
private boolean isRecording;
private boolean isProcessing;
private boolean isNewAudioFragment;
private final static int bytesPerSample = 2; // As it is 16bit PCM
private final double amplification = 1.0; // choose a number as you like
private static int frameLength = 512; // number of samples per frame => 32[ms] @Fs = 16[KHz]
private static int windowLength = 16; // number of frames per window => 512[ms] @Fs = 16[KHz]
private static int maxBufferedWindows = 8; // number of buffered windows => 4096[ms] @Fs = 16[KHz]
private static int bufferSize = frameLength*bytesPerSample;
private static double[] hannWindow = new double[frameLength*bytesPerSample];
private Queue<byte[]> queue = new LinkedList<byte[]>();
private Semaphore semaphoreProcess = new Semaphore(0, true);
private RecordSignal recordSignalThread;
private ProcessSignal processSignalThread;
public static class RecorderSingleton {
public static RecorderSingleton instance = new RecorderSingleton();
private AudioRecord recordInstance = null;
private RecorderSingleton() {
minBufferSize = AudioRecord.getMinBufferSize(Fs, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
while(minBufferSize>bufferSize) {
bufferSize = bufferSize*2;
}
}
public boolean init() {
recordInstance = new AudioRecord(MediaRecorder.AudioSource.MIC, Fs, channel_config, format, bufferSize);
if (recordInstance.getState() != AudioRecord.STATE_INITIALIZED) {
Log.d("audiotestActivity", "Fail to initialize AudioRecord object");
Log.d("audiotestActivity", "AudioRecord.getState()=" + recordInstance.getState());
}
if (recordInstance.getState() == AudioRecord.STATE_UNINITIALIZED) {
return false;
}
return true;
}
public int getBufferSize() {return bufferSize;}
public boolean start() {
if (recordInstance != null && recordInstance.getState() != AudioRecord.STATE_UNINITIALIZED) {
if (recordInstance.getRecordingState() != AudioRecord.RECORDSTATE_STOPPED) {
recordInstance.stop();
}
recordInstance.release();
}
if (!init()) {
return false;
}
recordInstance.startRecording();
return true;
}
public int read(byte[] audioBuffer) {
if (recordInstance == null) {
return AudioRecord.ERROR_INVALID_OPERATION;
}
int ret = recordInstance.read(audioBuffer, 0, bufferSize);
return ret;
}
public void stop() {
if (recordInstance == null) {
return;
}
if(recordInstance.getState()==AudioRecord.STATE_UNINITIALIZED) {
Log.d("AudioTest", "instance uninitialized");
return;
}
if(recordInstance.getState()==AudioRecord.STATE_INITIALIZED) {
recordInstance.stop();
recordInstance.release();
}
}
}
public class RecordSignal implements Runnable {
private boolean cancelled = false;
public void run() {
Looper.prepare();
// We're important...android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
int bufferRead = 0;
byte[] inAudioBuffer;
if (!RecorderSingleton.instance.start()) {
return;
}
try {
Log.d("audiotestActivity", "Recorder Started");
while(isRecording) {
inAudioBuffer = null;
inAudioBuffer = new byte[bufferSize];
bufferRead = RecorderSingleton.instance.read(inAudioBuffer);
if (bufferRead == AudioRecord.ERROR_INVALID_OPERATION) {
throw new IllegalStateException("read() returned AudioRecord.ERROR_INVALID_OPERATION");
} else if (bufferRead == AudioRecord.ERROR_BAD_VALUE) {
throw new IllegalStateException("read() returned AudioRecord.ERROR_BAD_VALUE");
}
queue.add(inAudioBuffer);
semaphoreProcess.release();
}
}
finally {
// Close resources...
stop();
}
Looper.loop();
}
public void stop() {
RecorderSingleton.instance.stop();
}
public void cancel() {
setCancelled(true);
}
public boolean isCancelled() {
return cancelled;
}
public void setCancelled(boolean cancelled) {
this.cancelled = cancelled;
}
}
public class ProcessSignal implements Runnable {
public void run() {
Looper.prepare();
//android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_DEFAULT);
while(isProcessing) {
try {
semaphoreProcess.acquire();
byte[] outAudioBuffer = new byte[frameLength*bytesPerSample*(bufferSize/(frameLength*bytesPerSample))];
outAudioBuffer = queue.element();
if(queue.size()>0) {
// do something, process your samples
}
queue.poll();
}
catch (InterruptedException e) {
e.printStackTrace();
}
}
Looper.loop();
}
}
and to start and stop simply:
public void startAudioTest() {
if(recordSignalThread!=null) {
recordSignalThread.stop();
recordSignalThread.cancel();
recordSignalThread = null;
}
if(processSignalThread!=null) {
processSignalThread = null;
}
recordSignalThread = new RecordSignal();
processSignalThread = new ProcessSignal();
new Thread(recordSignalThread).start();
new Thread(processSignalThread).start();
isRecording = true;
isProcessing = true;
}
public void stopAudioTest() {
isRecording = false;
isProcessing = false;
if(processSignalThread!=null) {
processSignalThread = null;
}
if(recordSignalThread!=null) {
recordSignalThread.cancel();
recordSignalThread = null;
}
}
I have two activities: one takes an image and the other processes the image. The problem I found is that when I take a photo more than once, the picture that gets processed is still the first one. Why does it not change the picture, and how can I fix it?
This is the first activity, which takes the image:
public class CaptureCamera extends Activity {
Camera mCamera = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.layout_start);
mCamera = getCameraInstance();
Preview mPreview = new Preview(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.cam_preview);
preview.addView(mPreview);
Display display = getWindowManager().getDefaultDisplay();
Point size = new Point();
display.getSize(size);
int screenCenterX = (size.x /4);
int screenCenterY = (size.y/6) ;
//Adding listener
ImageView captureButton = (ImageView) findViewById(R.id.button_camera);
captureButton.setOnClickListener(
new View.OnClickListener() {
@Override
public void onClick(View v) {
mCamera.takePicture(null, null, mPicture);
}
});
//Adding listener
ImageView backButton = (ImageView) findViewById(R.id.button_back);
backButton.setOnClickListener(
new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent1 = new Intent(CaptureCamera.this, MainMenuActivity.class);
finish();
startActivity(intent1);
}
});
//Adding listener
ImageView nextButton = (ImageView) findViewById(R.id.button_next);
nextButton.setOnClickListener(
new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent2 = new Intent(CaptureCamera.this, ProcessPic.class);
finish();
startActivity(intent2);
}
});
}
private Camera getCameraInstance() {
Camera camera = null;
try {
camera = Camera.open();
} catch (Exception e) {
// cannot get camera or does not exist
}
return camera;
}
PictureCallback mPicture = new PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFile = getOutputMediaFile();
if (pictureFile == null){
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
Toast.makeText(CaptureCamera.this, "Photo saved to folder \"sdcard\\DCIM\\CameraSnap\"", Toast.LENGTH_SHORT).show();
} catch (FileNotFoundException e) {
} catch (IOException e) {
}
}
};
private static File getOutputMediaFile(){
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_DCIM), "CameraSnap");
if (! mediaStorageDir.exists()){
if (! mediaStorageDir.mkdirs()){
Log.d("CameraSnap", "failed to create directory");
return null;
}
};
File mediaFile;
mediaFile = new File(mediaStorageDir.getPath() + File.separator +"IMG_0"+".jpg");
return mediaFile;
}
}
class Preview extends SurfaceView implements SurfaceHolder.Callback {
SurfaceHolder mHolder;
Camera mCamera;
Preview(Context context, Camera camera) {
super(context);
// Install a SurfaceHolder.Callback so we get notified when
this.mCamera = camera;
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
//this is a deprecated method, is not required after 3.0
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell
// to draw.
try {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the
// Because the CameraDevice object is not a shared resource,
// important to release it when the activity is paused.
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// Now that the size is known, set up the camera parameters
// the preview.
Camera.Parameters parameters = mCamera.getParameters();
List<Camera.Size> previewSizes = parameters.getSupportedPreviewSizes();
// You need to choose the most appropriate previewSize for your app
Camera.Size previewSize = previewSizes.get(0);
parameters.setPreviewSize(previewSize.width, previewSize.height);
mCamera.setParameters(parameters);
mCamera.startPreview();
}
}
Here is the Process activity that gets the image from DCIM/CameraSnap and calculates the HSV color.
But when I take a second picture and then click the button to go to this activity, the output (HSV color) that is shown still belongs to the old picture.
How can I fix it so that the latest picture is processed instead of the old one?
public class ProcessPic extends Activity {
public static ArrayList<Double> HueValue = new ArrayList<Double>(9);
public static int xImage,yImage,red,green,blue,RR,Y,B;
public static float[] hsv = new float[3];
public static Bitmap myBitmapPic,myBitmapPic1;
public static double a,b,r,std_err = 0.0;
public static double e;
public static int N;
//Variable for Vmode
public static int Rred,Ggreen,Bblue, maxCount;
static float maxValue;
public static float[] hsvMode = new float[3];
public static int[] ModeValue = new int[17415];//rare data
public static double[] HueValueMode = new double[17415];//rare data
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.layout_process);
TextView tv1 = (TextView)findViewById(R.id.textView1);
String path = Environment.getExternalStorageDirectory()+
File imgFile = new File(path);
myBitmapPic1 = BitmapFactory.decodeFile(imgFile.getAbsolutePath());
//resize
Bitmap myBitmapPic = null;
myBitmapPic = Bitmap.createScaledBitmap(myBitmapPic1, 2560, 1920, true);
ImageView myImage = (ImageView) findViewById(R.id.imageAdd);
myImage.setImageBitmap(myBitmapPic);
//0
ProcessPic h1ppm = new ProcessPic();
h1ppm.AverageColor(myBitmapPic, 244,395,1198,1388);
HueValue.add((double) hsv[0]);
ProcessPic model = new ProcessPic();
model.Regression(x, y);
tv1.setText(hsv[0]);
//Adding listener
ImageView backButton = (ImageView) findViewById(R.id.imageBack);
backButton.setOnClickListener(
new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(ProcessPic.this, MainMenuActivity.class);
finish();
startActivity(intent);
}
});
//Adding listener
ImageView nextButton = (ImageView) findViewById(R.id.imageSave);
nextButton.setOnClickListener(
new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent1 = new Intent(ProcessPic.this, AddStudent.class);
finish();
startActivity(intent1);
}
});
}
public static void AverageColor (Bitmap myBitmap,int minw, int maxw,int minh, int maxh){//for master color
red = 0;
green = 0;
blue = 0;
int count = 0;
for (int i=minw;i<maxw;i++){
for (int j=minh;j<maxh;j++){
int pixel = myBitmap.getPixel(i,j);
red += pixel >> 16 & 0xFF;
green += pixel >> 8 & 0xFF;
blue += pixel & 0xFF;
count++;
}
}
red /= count;
green /= count;
blue /= count;
//---------------------convert RGB to HSV----------------//
int avgRed = red;
int avgGreen = green;
int avgBlue = blue;
Color.RGBToHSV(avgRed,avgGreen,avgBlue,hsv);
//float hue = hsv[0];
//float saturate = hsv[1];
//float brightness = hsv[2];
}
public static void ValueArray (Bitmap myBitmap,int minw, int maxw,int minh, int maxh) {//for the pit region
Rred = 0;
Ggreen = 0;
Bblue = 0;
int count = 0;
for (int i=minw;i<maxw;i++){
for (int j=minh;j<maxh;j++){
int pixel = myBitmap.getPixel(i,j);
Rred = pixel >> 16 & 0xFF;
Ggreen = pixel >> 8 & 0xFF;
Bblue = pixel & 0xFF;
ProcessPic.RGBtoHSV(Rred, Ggreen, Bblue, hsvMode);
//Color.RGBToHSV(Rred,Ggreen,Bblue,hsvMode);
//create V array
//ModeValue[count] = (int)(hsvMode[2]*100);//why is the value here greater than [0...1]?
//HueValueMode[count] = hsvMode[0];
ModeValue[count] = (int)(hsvMode[2]*100);
HueValueMode[count] = hsvMode[0];
//can create h at this
count++;
}
}
}
public static int Mode(int a[]) {//ModeValue[]
for (int i = 0; i < a.length; ++i) {
int count = 0;
for (int j = 0; j < a.length; ++j) {
if (a[j] == a[i])
++count;
}
if (count > maxCount) {
maxCount = count;
maxValue = a[i];
}
}
return (int) maxValue;
}
public static double averagehuemode(int a[]) {//ModeValue[]
double temp=0;
int count=0;
for (int i = 0; i < a.length; ++i) {
if (a[i] >= maxValue-5 && a[i] <= maxValue+5){//this threshold number
temp+= HueValueMode[i];
count++;
}
}
return temp/count;
}
public static void RGBtoHSV(int r, int g, int b, float[] hsvMode){
double h, s, v;
double min, max, delta;
min = Math.min(Math.min(r, g), b);
max = Math.max(Math.max(r, g), b);
// V
v = max/255;
delta = max - min;
// S
if( max != 0 )
s = delta / max;
else {
s = 0;
h = -1;
//return new double[]{h,s,v};
}
// H
if( r == max )
h = ( g - b ) / delta; // between yellow & magenta
else if( g == max )
h = 2 + ( b - r ) / delta; // between cyan & yellow
else
h = 4 + ( r - g ) / delta; // between magenta & cyan
h *= 60; // degrees
if( h < 0 )
h += 360;
hsvMode[0] = (int)(h);
hsvMode[1] = (float)(s);
hsvMode[2] = (float)(v);
}
A few points to consider:
After calling Camera.takePicture() the preview stops, so "to take more photos, call startPreview() again" - do it after saving the image in onPictureTaken(), e.g.:
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
Toast.makeText(CaptureCamera.this, "Photo saved to folder \"sdcard\\DCIM\\CameraSnap\"", Toast.LENGTH_SHORT).show();
//start preview to take more pictures
mCamera.startPreview();
Do not swallow exceptions - if something goes wrong, you won't know it. Add printStackTrace() in onPictureTaken():
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
Toast.makeText(CaptureCamera.this, "Photo saved to folder \"sdcard\\DCIM\\CameraSnap\"", Toast.LENGTH_SHORT).show();
//start preview to take more pictures
mCamera.startPreview();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
When calling other activities, instead of:
Intent intent1 = new Intent(CaptureCamera.this, MainMenuActivity.class);
finish();
startActivity(intent1);
call finish() last, so:
Intent intent1 = new Intent(CaptureCamera.this, MainMenuActivity.class);
startActivity(intent1);
finish();
I am using a PHP/MySQL server model. I want to decode the JSON data that is sent to the server by this code; any help would be appreciated. The app sends the current GPS coordinates to the configured server as a JSON array of objects. I would like to decode it to share with other Android users. Thanks a lot. This is the Android code.
public class TrackerService extends Service {
private static final String TAG = "TripTracker/Service";
private final String updatesCache = "updates.cache";
public static TrackerService service;
private NotificationManager nm;
private Notification notification;
private static boolean isRunning = false;
private String freqString;
private int freqSeconds;
private String endpoint;
private final int MAX_RING_SIZE = 15;
private LocationListener locationListener;
private AlarmManager alarmManager;
private PendingIntent pendingAlarm;
private static volatile PowerManager.WakeLock wakeLock;
private AsyncTask httpPoster;
ArrayList<LogMessage> mLogRing = new ArrayList<LogMessage>();
ArrayList<Messenger> mClients = new ArrayList<Messenger>();
ArrayList<List> mUpdates = new ArrayList<List>();
final ReentrantReadWriteLock updateLock = new ReentrantReadWriteLock();
final Messenger mMessenger = new Messenger(new IncomingHandler());
static final int MSG_REGISTER_CLIENT = 1;
static final int MSG_UNREGISTER_CLIENT = 2;
static final int MSG_LOG = 3;
static final int MSG_LOG_RING = 4;
@Override
public IBinder onBind(Intent intent) {
return mMessenger.getBinder();
}
@Override
public void onCreate() {
super.onCreate();
TrackerService.service = this;
endpoint = Prefs.getEndpoint(this);
freqSeconds = 0;
freqString = null;
freqString = Prefs.getUpdateFreq(this);
if (freqString != null && !freqString.equals("")) {
try {
Pattern p = Pattern.compile("(\\d+)(m|h|s)");
Matcher m = p.matcher(freqString);
m.find();
freqSeconds = Integer.parseInt(m.group(1));
if (m.group(2).equals("h"))
freqSeconds *= (60 * 60);
else if (m.group(2).equals("m"))
freqSeconds *= 60;
}
catch (Exception e) {
}
}
if (endpoint == null || endpoint.equals("")) {
logText("invalid endpoint, stopping service");
stopSelf();
}
if (freqSeconds < 1) {
logText("invalid frequency (" + freqSeconds + "), stopping " +
"service");
stopSelf();
}
readCache();
showNotification();
isRunning = true;
/* we're not registered yet, so this will just log to our ring buffer,
* but as soon as the client connects we send the log buffer anyway */
logText("service started, requesting location update every " +
freqString);
/* findAndSendLocation() will callback to this */
locationListener = new LocationListener() {
public void onLocationChanged(Location location) {
sendLocation(location);
}
public void onStatusChanged(String provider, int status,
Bundle extras) {
}
public void onProviderEnabled(String provider) {
}
public void onProviderDisabled(String provider) {
}
};
/* we don't need to be exact in our frequency, try to conserve at least
* a little battery */
alarmManager = (AlarmManager)getSystemService(ALARM_SERVICE);
Intent i = new Intent(this, AlarmBroadcast.class);
pendingAlarm = PendingIntent.getBroadcast(this, 0, i, 0);
alarmManager.setInexactRepeating(AlarmManager.ELAPSED_REALTIME_WAKEUP,
SystemClock.elapsedRealtime(), freqSeconds * 1000, pendingAlarm);
}
@Override
public void onDestroy() {
super.onDestroy();
if (httpPoster != null)
httpPoster.cancel(true);
try {
LocationManager locationManager = (LocationManager)
this.getSystemService(Context.LOCATION_SERVICE);
locationManager.removeUpdates(locationListener);
}
catch (Exception e) {
}
/* kill persistent notification */
nm.cancelAll();
if (pendingAlarm != null)
alarmManager.cancel(pendingAlarm);
isRunning = false;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
return START_STICKY;
}
/* must be done inside of updateLock */
public void cacheUpdates() {
OutputStreamWriter cacheStream = null;
try {
FileOutputStream cacheFile = TrackerService.this.openFileOutput(
updatesCache, Activity.MODE_PRIVATE);
cacheStream = new OutputStreamWriter(cacheFile, "UTF-8");
/* would be nice to just serialize mUpdates but it's not
* serializable. create a json array of json objects, each object
* having each key/value pair of one location update. */
JSONArray ja = new JSONArray();
for (int i = 0; i < mUpdates.size(); i++) {
List<NameValuePair> pair = mUpdates.get(i);
JSONObject jo = new JSONObject();
for (int j = 0; j < pair.size(); j++) {
try {
jo.put(((NameValuePair)pair.get(j)).getName(),
pair.get(j).getValue());
}
catch (JSONException e) {
}
}
ja.put(jo);
}
cacheStream.write(ja.toString());
cacheFile.getFD().sync();
}
catch (IOException e) {
Log.w(TAG, e);
}
finally {
if (cacheStream != null) {
try {
cacheStream.close();
}
catch (IOException e) {
}
}
}
}
/* read json cache into mUpdates */
public void readCache() {
updateLock.writeLock().lock();
InputStreamReader cacheStream = null;
try {
FileInputStream cacheFile = TrackerService.this.openFileInput(
updatesCache);
StringBuffer buf = new StringBuffer("");
byte[] bbuf = new byte[1024];
int len;
while ((len = cacheFile.read(bbuf)) != -1)
buf.append(new String(bbuf));
JSONArray ja = new JSONArray(new String(buf));
mUpdates = new ArrayList<List>();
for (int j = 0; j < ja.length(); j++) {
JSONObject jo = ja.getJSONObject(j);
List<NameValuePair> nvp = new ArrayList<NameValuePair>(2);
Iterator<String> i = jo.keys();
while (i.hasNext()) {
String k = (String)i.next();
String v = jo.getString(k);
nvp.add(new BasicNameValuePair(k, v));
}
mUpdates.add(nvp);
}
if (mUpdates.size() > 0)
logText("read " + mUpdates.size() + " update" +
(mUpdates.size() == 1 ? "" : "s") + " from cache");
}
catch (JSONException e) {
}
catch (FileNotFoundException e) {
}
catch (IOException e) {
Log.w(TAG, e);
}
finally {
if (cacheStream != null) {
try {
cacheStream.close();
}
catch (IOException e) {
}
}
}
updateLock.writeLock().unlock();
}
/* called within wake lock from broadcast receiver, but assert that we have
* it so we can keep it longer when we return (since the location request
* uses a callback) and then free it when we're done running through the
* queue */
public void findAndSendLocation() {
if (wakeLock == null) {
PowerManager pm = (PowerManager)this.getSystemService(
Context.POWER_SERVICE);
/* we don't need the screen on */
wakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK,
"triptracker");
wakeLock.setReferenceCounted(true);
}
if (!wakeLock.isHeld())
wakeLock.acquire();
LocationManager locationManager = (LocationManager)
this.getSystemService(Context.LOCATION_SERVICE);
locationManager.requestSingleUpdate(LocationManager.GPS_PROVIDER,
locationListener, null);
}
public static boolean isRunning() {
return isRunning;
}
private void showNotification() {
nm = (NotificationManager)getSystemService(NOTIFICATION_SERVICE);
notification = new Notification(R.drawable.icon,
"Trip Tracker Started", System.currentTimeMillis());
PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
new Intent(this, MainActivity.class), 0);
notification.setLatestEventInfo(this, "Trip Tracker",
"Sending location every " + freqString, contentIntent);
notification.flags = Notification.FLAG_ONGOING_EVENT;
nm.notify(1, notification);
}
private void updateNotification(String text) {
if (nm != null) {
PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
new Intent(this, MainActivity.class), 0);
notification.setLatestEventInfo(this, "Trip Tracker", text,
contentIntent);
notification.when = System.currentTimeMillis();
nm.notify(1, notification);
}
}
public void logText(String log) {
LogMessage lm = new LogMessage(new Date(), log);
mLogRing.add(lm);
if (mLogRing.size() > MAX_RING_SIZE)
mLogRing.remove(0);
updateNotification(log);
for (int i = mClients.size() - 1; i >= 0; i--) {
try {
Bundle b = new Bundle();
b.putString("log", log);
Message msg = Message.obtain(null, MSG_LOG);
msg.setData(b);
mClients.get(i).send(msg);
}
catch (RemoteException e) {
/* client is dead, how did this happen */
mClients.remove(i);
}
}
}
/* flatten an array of NameValuePairs into an array of
* locations[0]latitude, locations[1]latitude, etc. */
public List<NameValuePair> getUpdatesAsArray() {
List<NameValuePair> pairs = new ArrayList<NameValuePair>(2);
for (int i = 0; i < mUpdates.size(); i++) {
List<NameValuePair> pair = mUpdates.get(i);
for (int j = 0; j < pair.size(); j++)
pairs.add(new BasicNameValuePair("locations[" + i + "][" +
((NameValuePair)pair.get(j)).getName() + "]",
pair.get(j).getValue()));
}
return pairs;
}
public int getUpdatesSize() {
return mUpdates.size();
}
public void removeUpdate(int i) {
mUpdates.remove(i);
}
private void sendLocation(Location location) {
List<NameValuePair> pairs = new ArrayList<NameValuePair>(2);
pairs.add(new BasicNameValuePair("time",
String.valueOf(location.getTime())));
pairs.add(new BasicNameValuePair("latitude",
String.valueOf(location.getLatitude())));
pairs.add(new BasicNameValuePair("longitude",
String.valueOf(location.getLongitude())));
pairs.add(new BasicNameValuePair("speed",
String.valueOf(location.getSpeed())));
/* push these pairs onto the queue, and only run the poster if another
* one isn't running already (if it is, it will keep running through
* the queue until it's empty) */
updateLock.writeLock().lock();
mUpdates.add(pairs);
int size = service.getUpdatesSize();
cacheUpdates();
updateLock.writeLock().unlock();
logText("location " +
(new DecimalFormat("#.######").format(location.getLatitude())) +
", " +
(new DecimalFormat("#.######").format(location.getLongitude())) +
(size <= 1 ? "" : " (" + size + " queued)"));
if (httpPoster == null ||
httpPoster.getStatus() == AsyncTask.Status.FINISHED)
(httpPoster = new HttpPoster()).execute();
}
class IncomingHandler extends Handler {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_REGISTER_CLIENT:
mClients.add(msg.replyTo);
/* respond with our log ring to show what we've been up to */
try {
Message replyMsg = Message.obtain(null, MSG_LOG_RING);
replyMsg.obj = mLogRing;
msg.replyTo.send(replyMsg);
}
catch (RemoteException e) {
}
break;
case MSG_UNREGISTER_CLIENT:
mClients.remove(msg.replyTo);
break;
default:
super.handleMessage(msg);
}
}
}
/* Void as first arg causes a crash, no idea why
E/AndroidRuntime(17157): Caused by: java.lang.ClassCastException: java.lang.Object[] cannot be cast to java.lang.Void[]
*/
class HttpPoster extends AsyncTask<Object, Void, Boolean> {
@Override
protected Boolean doInBackground(Object... o) {
TrackerService service = TrackerService.service;
int retried = 0;
int max_retries = 4;
while (true) {
if (isCancelled())
return false;
boolean failed = false;
updateLock.writeLock().lock();
List<NameValuePair> pairs = service.getUpdatesAsArray();
int pairSize = service.getUpdatesSize();
updateLock.writeLock().unlock();
AndroidHttpClient httpClient =
AndroidHttpClient.newInstance("TripTracker");
try {
HttpPost post = new HttpPost(endpoint);
post.setEntity(new UrlEncodedFormEntity(pairs));
HttpResponse resp = httpClient.execute(post);
int httpStatus = resp.getStatusLine().getStatusCode();
if (httpStatus == 200) {
/* all good, we can remove everything we've sent from
* the queue (but not just clear it, in case another
* one jumped onto the end while we were here) */
updateLock.writeLock().lock();
for (int i = pairSize - 1; i >= 0; i--)
service.removeUpdate(i);
updateLock.writeLock().unlock();
}
else {
logText("POST failed to " + endpoint + ": got " +
httpStatus + " status");
failed = true;
}
}
catch (Exception e) {
logText("POST failed to " + endpoint + ": " + e);
Log.w(TAG, e);
failed = true;
}
finally {
if (httpClient != null)
httpClient.close();
}
if (failed) {
/* if our initial request failed, snooze for a bit and try
* again, the server might not be reachable */
SystemClock.sleep(15 * 1000);
if (++retried > max_retries) {
/* give up since we're holding the wake lock open for
* too long. we'll get it next time, champ. */
logText("too many failures, retrying later (queue " +
"size " + service.getUpdatesSize() + ")");
break;
}
}
else
retried = 0;
int q = 0;
updateLock.writeLock().lock();
q = service.getUpdatesSize();
cacheUpdates();
updateLock.writeLock().unlock();
if (q == 0)
break;
/* otherwise, run through the rest of the queue */
}
return false;
}
protected void onPostExecute(Boolean b) {
if (wakeLock != null && wakeLock.isHeld())
wakeLock.release();
}
}
}
You can decode the data using json_decode()
<?php
$json = ""; // JSON data
$json_obj = json_decode($json); // as an object
$json_arr = json_decode($json, true); // as an array
var_dump($json_obj);
print_r($json_arr);
// Store the array in the database using the serialize function
$data = serialize($json_arr); // the array will be converted into a string
?>