Method called after release() even though I reopen camera - java

The following code takes a picture with the camera and works fine. However, when I pause the application (by going back to the home screen) and then reopen it, the camera image no longer gets saved.
Even though I release and re-open the camera in onPause()/onResume(), calling snapPicture() fails with the error "Method called after release()".
My code is below; the line 'Log.w("Error message: ", e.getLocalizedMessage());' is what logs the error.
Can anyone help me solve this problem?
CameraPreview.java
package com.example.testproject;
import java.io.IOException;
import android.content.Context;
import android.hardware.Camera;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class CameraPreview extends SurfaceView implements
SurfaceHolder.Callback {
private SurfaceHolder mSurfaceHolder;
private Camera mCamera;
// Constructor that obtains context and camera
@SuppressWarnings("deprecation")
public CameraPreview(Context context, Camera camera) {
super(context);
this.mCamera = camera;
this.mSurfaceHolder = this.getHolder();
this.mSurfaceHolder.addCallback(this);
this.mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
try {
mCamera.setPreviewDisplay(surfaceHolder);
mCamera.startPreview();
} catch (IOException e) {
// left blank for now
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int format,
int width, int height) {
// start preview with new settings
try {
mCamera.setPreviewDisplay(surfaceHolder);
mCamera.startPreview();
} catch (Exception e) {
// intentionally left blank for a test
}
}
}
Custom_CameraActivity.java
package com.example.testproject;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.sql.Date;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
public class Custom_CameraActivity extends Activity {
private Camera mCamera;
private CameraPreview mCameraPreview;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
public void setCameraAndCameraPreview(Camera mcamera,
CameraPreview mCameraPreview) {
this.mCamera = mcamera;
this.mCameraPreview = mCameraPreview;
}
public String snapPicture() {
TakePictureTaskAsync takePictureTask = new TakePictureTaskAsync();
takePictureTask.setCamera(mCamera);
Calendar cal = Calendar.getInstance();
String useThisTimeStamp = new SimpleDateFormat("HHmmss").format(cal
.getTime());
takePictureTask.configureTimestamp(useThisTimeStamp);
takePictureTask.execute();
return useThisTimeStamp;
}
/**
* Helper method to access the camera; returns null if the camera
* cannot be obtained or does not exist.
*
* @return the Camera instance, or null
*/
}
class TakePictureTaskAsync extends AsyncTask<Void, Void, Void> {
private Camera mCamera;
private String myTimeStamp;
public void setCamera(Camera mCamera) {
this.mCamera = mCamera;
}
public void configureTimestamp(String timeStamp) {
this.myTimeStamp = timeStamp;
}
@Override
protected void onPostExecute(Void result) {
}
@Override
protected Void doInBackground(Void... params) {
try {
mCamera.takePicture(null, null, mPicture);
} catch (Exception e) {
Log.w("Error message: ", e.getLocalizedMessage());
//this is where the error "method called after release" comes in
}
return null;
}
PictureCallback mPicture = new PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFile = getOutputMediaFile();
if (pictureFile == null) {
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(resizeImage(data));
fos.close();
} catch (FileNotFoundException e) {
} catch (IOException e) {
}
try {
HTTPPost postPictureToURL = new HTTPPost();
postPictureToURL.setTimestamp(myTimeStamp);
postPictureToURL.executeImagePOST();
} catch (Exception e) {
e.printStackTrace();
}
}
};
private File getOutputMediaFile() {
File mediaStorageDir = new File(
Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
"testdir");
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
return null;
}
}
File mediaFile;
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "cameraView_" + myTimeStamp + ".jpg");
return mediaFile;
}
byte[] resizeImage(byte[] input) {
Bitmap original = BitmapFactory.decodeByteArray(input, 0, input.length);
Bitmap resized = Bitmap.createScaledBitmap(original, 800, 600, true);
ByteArrayOutputStream blob = new ByteArrayOutputStream();
resized.compress(Bitmap.CompressFormat.JPEG, 70, blob);
return blob.toByteArray();
}
}
MainActivity.java
@Override
protected void onPause() {
super.onPause();
try {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCameraPreview.getHolder().removeCallback(mCameraPreview);
mCamera.release();
mCamera = null;
} catch (Exception e) {
e.printStackTrace();
}
}
protected void onResume() {
super.onResume();
if (mCamera != null) {
mCamera.startPreview();
} else {
mCamera = getCameraInstance();
mCamera.startPreview();
}
}
private Camera getCameraInstance() {
Camera camera = null;
try {
camera = Camera.open();
} catch (Exception e) {
//do nothing
}
return camera;
}
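A likely reason for the "Method called after release()" error: onResume() opens a new Camera, but Custom_CameraActivity (and the CameraPreview whose holder callback was removed in onPause()) still hold the Camera that was released, so snapPicture() ends up calling takePicture() on the dead instance. Below is a minimal sketch of how onResume() might re-wire everything to the fresh Camera; it assumes a small setCamera() setter is added to CameraPreview and that the activity still holds references to mCameraPreview and the Custom_CameraActivity helper.
@Override
protected void onResume() {
    super.onResume();
    if (mCamera == null) {
        mCamera = getCameraInstance();  // re-open the camera released in onPause()
    }
    // Hand the fresh Camera to everything that cached the released one.
    mCameraPreview.setCamera(mCamera);                       // hypothetical setter on CameraPreview
    mCameraPreview.getHolder().addCallback(mCameraPreview);  // re-attach the callback removed in onPause()
    try {
        mCamera.setPreviewDisplay(mCameraPreview.getHolder());
        mCamera.startPreview();
    } catch (IOException e) {
        Log.w("CameraResume", "Could not restart preview", e);
    }
    // Also push the new reference into Custom_CameraActivity, e.g. via
    // setCameraAndCameraPreview(mCamera, mCameraPreview), so snapPicture()
    // no longer uses the released Camera.
}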

Related

E/Camera: Error 2 in Logcat when using camera in app (Java App Development for Android)

I have been through hell coding my first Android app.
I get E/Camera: Error 2 in Logcat whenever the camera is used (the error code indicates the camera being used multiple times).
I have attached the entire project if you want to, and can, run it. Kindly help.
Link to Project on Google Drive
I am also attaching the code of the main files in this post if you want to view it directly.
Here is MainActivity.java
package com.example.cse535a1;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.content.FileProvider;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.MotionEvent;
import android.widget.Button;
import android.content.Context;
import android.view.View;
import android.hardware.*;
import android.widget.FrameLayout;
import android.widget.TextView;
import org.opencv.core.*;
import org.opencv.videoio.VideoCapture;
import java.io.File;
public class MainActivity extends AppCompatActivity implements SensorEventListener {
private Camera c;
private CameraView cv1;
private FrameLayout view_camera;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (!(getApplicationContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA))) {
this.finish();
System.exit(0);
}
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
VideoCapture video_capture;
Button button_symptoms = (Button)findViewById(R.id.button_symptoms);
Button button_upload_signs = (Button)findViewById(R.id.button_upload_signs);
Button button_measure_heart_rate = (Button)findViewById(R.id.button_measure_heart_rate);
Button button_measure_respiratory_rate = (Button)findViewById(R.id.button_measure_respiratory_rate);
c = getcam();
cv1 = new CameraView(getApplicationContext(), c);
view_camera = (FrameLayout)findViewById(R.id.view_camera);
view_camera.addView(cv1);
TextView finger_on_sensor = (TextView)findViewById(R.id.text_finger_on_sensor);
finger_on_sensor.setVisibility(View.INVISIBLE);
finger_on_sensor.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View arg_view, MotionEvent arg_me) {
finger_on_sensor.setVisibility(View.INVISIBLE);
File file_video = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/video_finger.mp4");
final int VIDEO_CAPTURE = 1;
Intent intent_record_video = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
intent_record_video.putExtra(MediaStore.EXTRA_DURATION_LIMIT, 45);
Uri fileUri = FileProvider.getUriForFile(MainActivity.this, "com.example.cse535a1.provider", file_video);
intent_record_video.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
c.release();
startActivityForResult(intent_record_video, VIDEO_CAPTURE);
c.stopPreview();
return true;
}
});
button_symptoms.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
Intent intent = new Intent(getApplicationContext(), Loggin_symptoms.class);
startActivity(intent);
}
});
button_upload_signs.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
}
});
button_measure_heart_rate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
finger_on_sensor.setVisibility(View.VISIBLE);
}
});
button_measure_respiratory_rate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
SensorManager manager_sensor = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
Sensor sensor_accelerometer = manager_sensor.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
manager_sensor.registerListener(MainActivity.this, sensor_accelerometer, SensorManager.SENSOR_DELAY_NORMAL);
}
});
}
@Override
public void onSensorChanged(SensorEvent arg_event) {
float x = arg_event.values[0];
float y = arg_event.values[1];
float z = arg_event.values[2];
Log.i("ACCELEROMETER", String.valueOf(x) + ' ' + String.valueOf(y) + ' ' + String.valueOf(z));
}
@Override
public void onAccuracyChanged(Sensor arg_sensor, int arg_accuracy) {
}
public Camera getcam() {
Camera c = null;
try { c = Camera.open(0); }
catch (Exception e) {
}
return c;
}
@Override
protected void onResume() {
super.onResume();
c = getcam();
cv1 = new CameraView(getApplicationContext(), c);
view_camera.addView(cv1);
}
@Override
protected void onDestroy() {
c.stopPreview();
c.release();
c = null;
super.onDestroy();
}
}
Here is CameraView.java
package com.example.cse535a1;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.content.Context;
import android.hardware.Camera;
import java.io.IOException;
public class CameraView extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder holder_surface;
private Camera camera_selected;
public CameraView(Context arg_context, Camera arg_camera) {
super(arg_context);
// Log.i("Cam", "constructor");
camera_selected = arg_camera;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
holder_surface = getHolder();
holder_surface.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
holder_surface.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder arg_holder) {
// The Surface has been created, now tell the camera where to draw the preview.
try {
camera_selected.setPreviewDisplay(arg_holder);
camera_selected.startPreview();
// Log.i("Cam", "surface creator");
} catch (IOException e) {
// Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// empty. Take care of releasing the Camera preview in your activity.
}
public void surfaceChanged(SurfaceHolder arg_holder, int arg_format, int arg_width, int arg_height) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (holder_surface.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
camera_selected.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
camera_selected.setPreviewDisplay(holder_surface);
camera_selected.startPreview();
} catch (Exception e){
// Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
}
Here are the updated files. Removing the statements that release the camera seems to remove the errors about using the camera after it was released, and unlocking the camera in onPause() removed the E/Camera: Error 2.
MainActivity.java
package com.example.cse535a1;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.content.FileProvider;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.MotionEvent;
import android.widget.Button;
import android.content.Context;
import android.view.View;
import android.hardware.*;
import android.widget.FrameLayout;
import android.widget.TextView;
import org.opencv.core.*;
import org.opencv.videoio.VideoCapture;
import java.io.File;
public class MainActivity extends AppCompatActivity implements SensorEventListener {
private Camera c;
private CameraView cv1;
private FrameLayout view_camera;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (!(getApplicationContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA))) {
this.finish();
System.exit(0);
}
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
VideoCapture video_capture;
Button button_symptoms = (Button)findViewById(R.id.button_symptoms);
Button button_upload_signs = (Button)findViewById(R.id.button_upload_signs);
Button button_measure_heart_rate = (Button)findViewById(R.id.button_measure_heart_rate);
Button button_measure_respiratory_rate = (Button)findViewById(R.id.button_measure_respiratory_rate);
cv1 = new CameraView(getApplicationContext(), this);
view_camera = (FrameLayout)findViewById(R.id.view_camera);
view_camera.addView(cv1);
TextView finger_on_sensor = (TextView)findViewById(R.id.text_finger_on_sensor);
finger_on_sensor.setVisibility(View.INVISIBLE);
finger_on_sensor.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View arg_view, MotionEvent arg_me) {
finger_on_sensor.setVisibility(View.INVISIBLE);
File file_video = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/video_finger.mp4");
final int VIDEO_CAPTURE = 101;
Intent intent_record_video = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
intent_record_video.putExtra(MediaStore.EXTRA_DURATION_LIMIT, 45);
Uri fileUri = FileProvider.getUriForFile(MainActivity.this, "com.example.cse535a1.provider", file_video);
intent_record_video.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
startActivityForResult(intent_record_video, VIDEO_CAPTURE);
return false;
}
});
button_symptoms.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
Intent intent = new Intent(getApplicationContext(), Loggin_symptoms.class);
startActivity(intent);
}
});
button_upload_signs.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
}
});
button_measure_heart_rate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
finger_on_sensor.setVisibility(View.VISIBLE);
}
});
button_measure_respiratory_rate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
SensorManager manager_sensor = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
Sensor sensor_accelerometer = manager_sensor.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
manager_sensor.registerListener(MainActivity.this, sensor_accelerometer, SensorManager.SENSOR_DELAY_NORMAL);
}
});
}
public void setCam(Camera arg_camera) {
c = arg_camera;
}
@Override
public void onSensorChanged(SensorEvent arg_event) {
float x = arg_event.values[0];
float y = arg_event.values[1];
float z = arg_event.values[2];
Log.i("ACCELEROMETER", String.valueOf(x) + ' ' + String.valueOf(y) + ' ' + String.valueOf(z));
}
@Override
public void onAccuracyChanged(Sensor arg_sensor, int arg_accuracy) {
}
public Camera getcam() {
Camera c = null;
try { c = Camera.open(0); }
catch (Exception e) {
}
return c;
}
@Override
protected void onPause() {
super.onPause();
c.unlock();
// if (c != null) {
// c.stopPreview();
// c.release();
// c = null;
// }
}
@Override
protected void onResume() {
super.onResume();
// if (c != null) {
// c.stopPreview();
// c.release();
// c = null;
// }
// cv1 = new CameraView(getApplicationContext(), this);
// view_camera.addView(cv1);
}
@Override
protected void onDestroy() {
if (c != null) {
c.stopPreview();
c.release();
c = null;
}
super.onDestroy();
}
}
CameraView.java
package com.example.cse535a1;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.content.Context;
import android.hardware.Camera;
import java.io.IOException;
public class CameraView extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder holder_surface;
private Camera camera_selected;
MainActivity act1;
public CameraView(Context arg_context, MainActivity arg_activity) {
super(arg_context);
// camera_selected = arg_camera;
act1 = arg_activity;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
holder_surface = getHolder();
holder_surface.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
holder_surface.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder arg_holder) {
// The Surface has been created, now tell the camera where to draw the preview.
try {
// Log.i("CAMNULL", "CAM IS : " + String.valueOf(camera_selected == null));
Camera c = null;
try {
c = Camera.open(0);
} catch (Exception e) {
Log.e("CAMERA", "Camera not opened");
}
act1.setCam(c);
camera_selected = c;
camera_selected.setPreviewDisplay(arg_holder);
// camera_selected.startPreview();
// Log.i("Cam", "surface creator");
} catch (IOException e) {
// Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// empty. Take care of releasing the Camera preview in your activity.
// if (camera_selected != null) {
// camera_selected.stopPreview();
// camera_selected.release();
// camera_selected = null;
// }
}
public void surfaceChanged(SurfaceHolder arg_holder, int arg_format, int arg_width, int arg_height) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (holder_surface.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
camera_selected.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
camera_selected.setPreviewDisplay(holder_surface);
camera_selected.startPreview();
} catch (Exception e){
// Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
}
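Rather than calling c.unlock() in onPause(), the pattern the Android camera guide recommends is to release the camera whenever the activity is paused and re-acquire it, together with a fresh preview surface, on resume. A rough sketch against the updated classes above, assuming the CameraView is no longer added in onCreate() so it is created exactly once per resume:
@Override
protected void onPause() {
    super.onPause();
    if (c != null) {
        c.stopPreview();
        c.release();                  // give the camera back to the system while in the background
        c = null;
    }
    view_camera.removeAllViews();     // discard the old preview surface
}
@Override
protected void onResume() {
    super.onResume();
    // Adding a fresh CameraView triggers surfaceCreated(), which re-opens the
    // camera and hands it back to this activity via setCam().
    cv1 = new CameraView(getApplicationContext(), this);
    view_camera.addView(cv1);
}
Keeping a single owner of the camera at any one time is also what the "multiple camera uses" reading of Error 2 mentioned above suggests is needed.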

How capture photo of camera periodically from service? [closed]

I want to periodically capture a photo from the camera in a service and send it through a socket to a server application (desktop software). I tried to do this with the code below, but only one capture is taken and the capturing does not continue.
Based on this answer, I must call startPreview() before capturing with takePicture(); I am already doing this, but it still does not work.
Could someone help me, please?
package com.example.vrs.myexampleapp;
import android.Manifest;
import android.app.Activity;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.support.v4.app.ActivityCompat;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.WindowManager;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.List;
import static android.os.Environment.getExternalStorageDirectory;
@SuppressWarnings("deprecation")
public class MyCamera extends Service {
public final int DONE = 1;
public final int NEXT = 2;
public final int PERIOD = 1000;
private Camera camera;
private Timer timer;
private int cameraId = 0;
SurfaceHolder previewHolder;
public class Timer extends AsyncTask<Void, Void, Void> {
Context mContext;
private Handler threadHandler;
public Timer(Context context, Handler threadHandler) {
super();
this.threadHandler = threadHandler;
mContext = context;
}
@Override
protected Void doInBackground(Void... params) {
try {
Thread.sleep(PERIOD);
Message.obtain(threadHandler, DONE, "").sendToTarget();
} catch (InterruptedException e) {
e.printStackTrace();
}
return null;
}
}
private int findFrontFacingCamera() {
int cameraId = -1;
int numberOfCameras = Camera.getNumberOfCameras();
for (int i = 0; i < numberOfCameras; i++) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
Log.i("MyCamera", "Camera found");
cameraId = i;
break;
}
}
return cameraId;
}
public void startCamera() {
if (ActivityCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.CAMERA)
== PackageManager.PERMISSION_GRANTED && getPackageManager()
.hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
cameraId = findFrontFacingCamera();
if (cameraId < 0) {
Log.i("MyCamera", "No front facing camera found.");
} else {
safeCameraOpen(cameraId);
}
if (android.os.Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
SurfaceView dummy = new SurfaceView(this);
previewHolder = dummy.getHolder();
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
try {
camera.setPreviewDisplay(previewHolder);
} catch (IOException e1) {
e1.printStackTrace();
}
} else {
SurfaceTexture surfaceTexture = new SurfaceTexture(MODE_PRIVATE);
try {
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
}
camera.startPreview();
if (android.os.Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
try {
camera.setPreviewDisplay(previewHolder);
} catch (IOException e1) {
e1.printStackTrace();
}
}
Camera.Parameters params = camera.getParameters();
params.setJpegQuality(100);
camera.setParameters(params);
timer = new Timer(getApplicationContext(), threadHandler);
timer.execute();
}
}
public static DataOutputStream dos;
public static byte[] array;
private Handler threadHandler = new Handler() {
public void handleMessage(android.os.Message msg) {
switch (msg.what) {
case DONE:
camera.startPreview();
camera.takePicture(null, null, mCall);
break;
case NEXT:
Log.i("MyCamera", "Here in NEXT!!");
timer = new Timer(getApplicationContext(), threadHandler);
timer.execute();
break;
}
}
};
Camera.PictureCallback mCall = new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Log.i("MyCamera", "Here in PictureCallback");
if (data != null) {
Matrix mtx = new Matrix();
mtx.postRotate(270);
Bitmap bitmapPicture = BitmapFactory.decodeByteArray(data, 0, data.length);
bitmapPicture = Bitmap.createScaledBitmap(bitmapPicture, 360, 360, true);
Bitmap rotatedBMP = Bitmap.createBitmap(bitmapPicture, 0, 0, bitmapPicture.getWidth(), bitmapPicture.getHeight(), mtx, true);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
try {
rotatedBMP.compress(Bitmap.CompressFormat.JPEG, 100, bos);
array = Methods.compress(bos.toByteArray());
new ConnAsyncTask().execute();
Message.obtain(threadHandler, NEXT, "").sendToTarget(); // Capture a new photo
} catch (Exception e) {
e.printStackTrace();
}
}
}
};
static class ConnAsyncTask extends AsyncTask<Void, Void, Void> {
protected Void doInBackground(Void... params) {
try {
dos = new DataOutputStream(SocketBackgroundService.yclientSocket.getOutputStream());
dos.writeInt(array.length);
dos.write(array, 0, array.length);
dos.flush();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
}
private boolean safeCameraOpen(int id) {
boolean qOpened = false;
try {
stopCamera();
camera = Camera.open(id);
qOpened = (camera != null);
} catch (Exception e) {
Log.i("MyCamera", "failed to open Camera");
e.printStackTrace();
}
return qOpened;
}
public void stopCamera() {
if (camera != null) {
camera.stopPreview();
camera.release();
camera = null;
}
}
public static MyCamera instance;
#Override
public void onCreate() {
super.onCreate();
Log.i("MyCamera", "Service created!!!");
instance = this;
}
#Override
public int onStartCommand(Intent intent, int flags, int startId) {
Log.i("MyCamera", "onStartCommand() service started!!!");
instance = this;
return START_STICKY;
}
#Override
public void onDestroy() {
super.onDestroy();
stopCamera();
}
#Override
public IBinder onBind(Intent intent) {
return null;
}
}
I'm not entirely sure why you're using an AsyncTask (your Timer class) just for a delay.
Replace it with a simple Handler.postDelayed().
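For instance, a minimal sketch of that approach, using the PERIOD constant, camera field, and mCall callback from the service above; the Runnable simply re-posts itself after each capture instead of round-tripping through an AsyncTask and a Handler message:
private final Handler captureHandler = new Handler();   // created on the service's main thread
private final Runnable captureRunnable = new Runnable() {
    @Override
    public void run() {
        if (camera == null) return;            // service is stopping, camera already released
        camera.startPreview();                 // preview must be running again before the next shot
        camera.takePicture(null, null, mCall);
    }
};
// After startCamera() has configured the camera, instead of timer.execute():
//     captureHandler.postDelayed(captureRunnable, PERIOD);
// At the end of onPictureTaken(), instead of sending the NEXT message:
//     captureHandler.postDelayed(captureRunnable, PERIOD);
// In onDestroy(), call captureHandler.removeCallbacks(captureRunnable) before stopCamera().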

Using camera inside an app using surfaceview in android

I am creating an app where I can use the camera inside it without going to the phone's default camera app. I have used a SurfaceView and tried to save the picture onto the phone, but onPictureTaken never gets called. The complete code is shown below:
package com.example.surfaceviewcamera;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.hardware.Camera;
import android.os.Bundle;
import android.app.Activity;
import android.content.Context;
import android.content.ContextWrapper;
import android.view.Menu;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.Toast;
public class MainActivity extends Activity implements SurfaceHolder.Callback {
SurfaceView mSurfaceView;
SurfaceHolder mSurfaceHolder;
Camera mCamera;
boolean mPreviewRunning;
Button btncapture;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
btncapture=(Button) findViewById(R.id.btncapture);
mSurfaceView = (SurfaceView) findViewById(R.id.surface_camera);
mSurfaceHolder = mSurfaceView.getHolder();
mSurfaceHolder.addCallback(this);
mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
btncapture.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
public void onPictureTaken(byte[] imageData, Camera c) {
Bitmap bitmap = BitmapFactory.decodeByteArray(imageData , 0, imageData .length);
String file_path=saveToInternalSorage(bitmap);
Toast.makeText(getApplicationContext(),"Image stored succesfully at "+file_path,Toast.LENGTH_LONG).show();
}
};
}
});
}
private String saveToInternalSorage(Bitmap bitmapImage){
ContextWrapper cw = new ContextWrapper(getApplicationContext());
// path to /data/data/yourapp/app_data/imageDir
File directory = cw.getDir("imageDir", Context.MODE_PRIVATE);
// Create imageDir
File mypath=new File(directory,"marina1.jpg");
FileOutputStream fos = null;
try {
fos = new FileOutputStream(mypath);
// Use the compress method on the BitMap object to write image to the OutputStream
bitmapImage.compress(Bitmap.CompressFormat.PNG, 100, fos);
fos.close();
} catch (Exception e) {
e.printStackTrace();
}
return directory.getAbsolutePath();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
mCamera=Camera.open();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int w,
int h) {
if (mPreviewRunning) {
mCamera.stopPreview();
}
Camera.Parameters p = mCamera.getParameters();
p.setPreviewSize(w, h);
mCamera.setParameters(p);
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
mPreviewRunning = true;
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mCamera.stopPreview();
mPreviewRunning = false;
mCamera.release();
}
}
My XML file:
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical">
<SurfaceView
android:id="#+id/surface_camera"
android:layout_width="fill_parent"
android:layout_height="10dip"
android:layout_weight="1">
</SurfaceView>
<Button
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="CAPTURE IMAGE"
android:id="#+id/btncapture"
android:layout_gravity="center"
/>
</LinearLayout>
Call camera.takePicture(null, null, callback); from onClick() and define the callback outside onCreate().
For reference, look into this link.
public class MainActivity extends Activity implements SurfaceHolder.Callback {
SurfaceView mSurfaceView;
SurfaceHolder mSurfaceHolder;
Camera mCamera;
boolean mPreviewRunning;
Button btncapture;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
btncapture=(Button) findViewById(R.id.btncapture);
mSurfaceView = (SurfaceView) findViewById(R.id.surface_camera);
mSurfaceHolder = mSurfaceView.getHolder();
mSurfaceHolder.addCallback(this);
mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
btncapture.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
//take picture here
mCamera.takePicture(null, null, mPictureCallback);
}
});
}
Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
public void onPictureTaken(byte[] imageData, Camera c) {
Bitmap bitmap = BitmapFactory.decodeByteArray(imageData , 0, imageData .length);
String file_path=saveToInternalSorage(bitmap);
Toast.makeText(getApplicationContext(),"Image stored succesfully at "+file_path,Toast.LENGTH_LONG).show();
}
};
private String saveToInternalSorage(Bitmap bitmapImage){
ContextWrapper cw = new ContextWrapper(getApplicationContext());
// path to /data/data/yourapp/app_data/imageDir
File directory = cw.getDir("imageDir", Context.MODE_PRIVATE);
// Create imageDir
File mypath=new File(directory,"marina1.jpg");
FileOutputStream fos = null;
try {
fos = new FileOutputStream(mypath);
// Use the compress method on the BitMap object to write image to the OutputStream
bitmapImage.compress(Bitmap.CompressFormat.PNG, 100, fos);
fos.close();
} catch (Exception e) {
e.printStackTrace();
}
return directory.getAbsolutePath();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
mCamera=Camera.open();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int w,
int h) {
if (mPreviewRunning) {
mCamera.stopPreview();
}
Camera.Parameters p = mCamera.getParameters();
p.setPreviewSize(w, h);
mCamera.setParameters(p);
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
mPreviewRunning = true;
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mCamera.stopPreview();
mPreviewRunning = false;
mCamera.release();
}
}

Camera takePicture returns java.lang.NullPointerException?

I wrote code to capture an image automatically in the background.
For this, I made a thread and take the picture in it.
My code looks like this:
package com.camsharp;
import java.io.File;
import java.io.FileOutputStream;
import java.util.Timer;
import java.util.TimerTask;
import android.app.Activity;
import android.content.Context;
import android.hardware.Camera;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.widget.FrameLayout;
import android.widget.Toast;
public class MainActivity extends Activity {
private int cameraId = 0;
private Camera mCamera;
private CameraPreview mPreview;
String fileName = "tempImage.jpeg";
File file;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.d("EXEC ", "EXECUTED ");
setContentView(R.layout.activity_main);
// Create an instance of Camera
mCamera = getCameraInstance(cameraId);
if (mCamera == null) {
Toast.makeText(
getApplicationContext(),
"The camera service is currently unavailable, please try again!",
Toast.LENGTH_LONG).show();
finish();
} else {
// Create our Preview view and set it as the content of our
// activity.
mPreview = new CameraPreview(this, mCamera);
FrameLayout frameLayout = (FrameLayout) findViewById(R.id.camera_preview);
frameLayout.addView(mPreview);
}
// start thread for these
MyTimerTask myTask = new MyTimerTask();
Timer myTimer = new Timer();
// public void schedule (TimerTask task, long delay, long period)
// Schedule a task for repeated fixed-delay execution after a specific
// delay.
//
// Parameters
// task the task to schedule.
// delay amount of time in milliseconds before first execution.
// period amount of time in milliseconds between subsequent executions.
myTimer.schedule(myTask, 3000, 1500);
}
class MyTimerTask extends TimerTask {
public void run() {
try {
Log.e("SUCCESS ", "IT IS OKAY ");
//mCamera.takePicture(null, null, null, mPictureCallback);
mCamera.takePicture(null, null, mPictureCallback);
file = new File(getFilesDir(), fileName);
System.out.println(file);
} catch (Exception e) {
Log.e("Error ", "EXCEPTION ");
e.printStackTrace();
}
Log.e("LOG ", "timer testing");
}
}
Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
public void onPictureTaken(byte[] imageData, Camera c) {
Log.e("Callback TAG", "Here in jpeg Callback");
if (imageData != null) {
FileOutputStream outputStream;
try {
outputStream = openFileOutput(fileName,
Context.MODE_PRIVATE);
outputStream.write(imageData);
outputStream.close();
// Intent intent = new Intent(SnapScreen.this,
// PreviewScreen.class);
// if (fromMessageReview == true) {
// intent.putExtra("fromMessageReview", "true");
// }
// startActivity(intent);
// overridePendingTransition(R.anim.slide_in,
// R.anim.slide_out);
finish();
} catch (Exception e) {
e.printStackTrace();
}
}
}
};
@Override
protected void onDestroy() {
super.onDestroy();
releaseCamera();
}
/** A safe way to get an instance of the Camera object. */
public static Camera getCameraInstance(int cameraId) {
Camera c = null;
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
c = Camera.open(cameraId);
} else {
c = Camera.open();
}
} catch (Exception e) {
c = null;
}
return c; // returns null if camera is unavailable
}
private void releaseCamera() {
if (mCamera != null) {
mCamera.release(); // release the camera for other applications
mCamera = null;
}
}
}
Here, when the thread runs, it throws java.lang.NullPointerException.
I tried calling
mCamera.startPreview();
before taking a new image, but it did not help.
Can anyone identify where I am making a mistake, and why?
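One detail worth checking: the picture callback calls finish(), which leads to onDestroy() running releaseCamera() and setting mCamera to null while the Timer is still scheduled, and takePicture() will also fail if the preview is not running when it is called again. A defensive sketch of the timer task under those assumptions (it presumes the Timer created in onCreate() is kept in a field so it can be cancelled in onDestroy()):
class MyTimerTask extends TimerTask {
    public void run() {
        Camera cam = mCamera;        // snapshot the field; onDestroy() may null it concurrently
        if (cam == null) {
            cancel();                // camera already released, stop firing
            return;
        }
        try {
            cam.startPreview();      // takePicture() stops the preview, so restart it before each shot
            cam.takePicture(null, null, mPictureCallback);
        } catch (Exception e) {
            Log.e("MyTimerTask", "takePicture failed", e);
        }
    }
}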

Android Front Facing Camera Android 4.0.3 ICS Start Failed

I'm trying to create an Android app that records video from the front-facing camera. The preview works fine, but as soon as I start recording, the app crashes with the error "java.lang.RuntimeException: start failed." I've tried the same with the rear-facing camera, but it has the same problem. I'm working on a Samsung Tab 2 P5110 which is running Android ICS 4.0.3.
This is my Activity class that shows the preview and should start the recording.
import java.io.IOException;
import java.lang.reflect.Method;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Field;
import android.app.Activity;
import android.content.Intent;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.View.OnClickListener;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.FrameLayout;
public class VideoActivity extends Activity implements OnClickListener,
SurfaceHolder.Callback, MediaRecorder.OnInfoListener {
MediaRecorder recorder;
SurfaceHolder holder;
Camera camera;
boolean recording = false;
public static final String TAG = "VIDEOCAPTURE";
private int show_id;
private CamcorderProfile mProfile;
private CountDownTimer cdt = null;
private static final int max_length_in_ms = 30000;
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
requestWindowFeature( Window.FEATURE_NO_TITLE );
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN );
show_id = getIntent().getIntExtra(Database.SHOW_ID, -1);
camera = this.openFrontFacingCamera(); // frontfacing camera
camera = camera.open(); //rearfacing
recorder = new MediaRecorder();
recorder.setCamera(camera);
setContentView(R.layout.video);
FrameLayout cameraView = (FrameLayout) findViewById(R.id.CameraView);
CameraSurfaceView cameraSurface = new CameraSurfaceView(getBaseContext(), camera);
cameraView.addView(cameraSurface);
holder = cameraSurface.getHolder();
cameraView.setClickable(true);
findViewById(R.id.recBtn).setOnClickListener(this);
}
private void initRecorder() {
recorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
recorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_LOW));
recorder.setOutputFile("/sdcard/videotemp.mp4");
recorder.setMaxDuration(VideoActivity.max_length_in_ms);
recorder.setMaxFileSize(50000000); // Approximately 50 megabytes
}
private void prepareRecorder() {
recorder.setPreviewDisplay(holder.getSurface());
try {
recorder.prepare();
} catch (IllegalStateException e) {
e.printStackTrace();
finish();
} catch (IOException e) {
e.printStackTrace();
finish();
}
}
public void onClick(View v) {
if (recording) {
recorder.stop();
recorder.release();
recording = false;
camera.release();
Log.v(TAG, "Recording Stopped");
// Let's initRecorder so we can record again
Intent intent = new Intent(this, UploadActivity.class);
intent.putExtra(Database.SHOW_ID, show_id);
intent.putExtra(Database.FILENAME, "/sdcard/videotemp.mp4");
intent.putExtra(Database.EXTENTION, ".mp4");
startActivity(intent);
finish();
} else {
recording = true;
initRecorder();
prepareRecorder();
recorder.start();
ProgressBar pb = (ProgressBar) findViewById(R.id.videoProgress);
pb.setMax(VideoActivity.max_length_in_ms);
this.cdt = new CountDownTimer(VideoActivity.max_length_in_ms, 50) {
public void onTick(long millisUntilFinished) {
ProgressBar pb = (ProgressBar) findViewById(R.id.videoProgress);
pb.setProgress( VideoActivity.max_length_in_ms - (int)((millisUntilFinished) ));
}
public void onFinish() {
ProgressBar pb = (ProgressBar) findViewById(R.id.videoProgress);
pb.setProgress(100);
}
};
this.cdt.start();
Log.v(TAG, "Recording Started");
}
}
private Camera openFrontFacingCamera() {
Camera camera = null;
// Look for front-facing camera, using the Gingerbread API.
// Java reflection is used for backwards compatibility with
// pre-Gingerbread APIs.
try {
Class<?> cameraClass = Class.forName("android.hardware.Camera");
Object cameraInfo = null;
Field field = null;
int cameraCount = 0;
Method getNumberOfCamerasMethod = cameraClass
.getMethod("getNumberOfCameras");
if (getNumberOfCamerasMethod != null) {
cameraCount = (Integer) getNumberOfCamerasMethod.invoke(null,
(Object[]) null);
}
Class<?> cameraInfoClass = Class
.forName("android.hardware.Camera$CameraInfo");
if (cameraInfoClass != null) {
cameraInfo = cameraInfoClass.newInstance();
}
if (cameraInfo != null) {
field = cameraInfo.getClass().getField("facing");
}
Method getCameraInfoMethod = cameraClass.getMethod("getCameraInfo",
Integer.TYPE, cameraInfoClass);
if (getCameraInfoMethod != null && cameraInfoClass != null
&& field != null) {
for (int camIdx = 0; camIdx < cameraCount; camIdx++) {
getCameraInfoMethod.invoke(null, camIdx, cameraInfo);
int facing = field.getInt(cameraInfo);
if (facing == 1) { // Camera.CameraInfo.CAMERA_FACING_FRONT
try {
Method cameraOpenMethod = cameraClass.getMethod(
"open", Integer.TYPE);
if (cameraOpenMethod != null) {
camera = (Camera) cameraOpenMethod.invoke(null,
camIdx);
mProfile = CamcorderProfile
.get(camIdx, CamcorderProfile.QUALITY_LOW);
}
} catch (RuntimeException e) {
Log.e(TAG,
"Camera failed to open: "
+ e.getLocalizedMessage());
}
}
}
}
}
// Ignore the bevy of checked exceptions the Java Reflection API throws
// - if it fails, who cares.
catch (ClassNotFoundException e) {
Log.e(TAG, "ClassNotFoundException" + e.getLocalizedMessage());
} catch (NoSuchMethodException e) {
Log.e(TAG, "NoSuchMethodException" + e.getLocalizedMessage());
} catch (NoSuchFieldException e) {
Log.e(TAG, "NoSuchFieldException" + e.getLocalizedMessage());
} catch (IllegalAccessException e) {
Log.e(TAG, "IllegalAccessException" + e.getLocalizedMessage());
} catch (InstantiationException e) {
Log.e(TAG, "InstantiationException" + e.getLocalizedMessage());
} catch (SecurityException e) {
Log.e(TAG, "SecurityException" + e.getLocalizedMessage());
} catch (IllegalArgumentException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (InvocationTargetException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if (camera == null) {
// Try using the pre-Gingerbread APIs to open the camera.
try {
camera = Camera.open();
} catch (RuntimeException e) {
Log.e(TAG, "Camera failed to open: " + e.getLocalizedMessage());
}
}
return camera;
}
}
For the preview, I'm working with an extended version of SurfaceView.
package nl.raakict.android.VideoCapture;
import java.io.IOException;
import android.content.Context;
import android.hardware.Camera;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraSurfaceView(Context context, Camera camera) {
super(context);
mCamera = camera;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, now tell the camera where to draw the preview.
try {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
} catch (IOException e) {
Log.d("DEBUG", "Error setting camera preview: " + e.getMessage());
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (mHolder.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e){
Log.d("DEBUG", "Error starting camera preview: " + e.getMessage());
}
}
}
Any help would be greatly appreciated.
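One thing the MediaRecorder documentation calls out is that the Camera must be unlocked with Camera.unlock() before it is handed to the recorder, and that the recorder should be configured in the documented order before prepare(). A hedged sketch of how initRecorder() might be rearranged along those lines, using the fields from the activity above (mProfile is the front-camera profile looked up in openFrontFacingCamera()):
private void initRecorder() {
    camera.unlock();                           // allow the media process to take over the camera
    recorder = new MediaRecorder();
    recorder.setCamera(camera);
    recorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
    recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
    // Use the profile of the camera that is actually recording; the generic
    // QUALITY_LOW profile may request a resolution the front camera cannot deliver.
    recorder.setProfile(mProfile != null ? mProfile
            : CamcorderProfile.get(CamcorderProfile.QUALITY_LOW));
    recorder.setOutputFile("/sdcard/videotemp.mp4");
    recorder.setMaxDuration(max_length_in_ms);
    recorder.setMaxFileSize(50000000);         // approximately 50 megabytes
}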
