I'm trying to create a barcode scanner with the camera2 API and Google's ML Kit. I've finally managed to get the preview working, but I have no idea how to get the frames themselves and pass them on to the ML Kit API.
I've tried using the ImageReader class, but somehow the onImageAvailable callback is never called.
Here is the code:
import ...
public class barcodeScannerActivity extends AppCompatActivity
implements OnRequestPermissionsResultCallback {
CameraManager mCameraManager;
SurfaceView mSurfaceViewPreview;
Surface mSurfacePreview;
CaptureRequest.Builder mPreviewRequestBuilder;
CaptureRequest mPreviewRequest;
List<Surface> mSurfaceList;
ImageReader mImageReader;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_barcode_scanner);
FirebaseVisionBarcodeDetectorOptions options =
new FirebaseVisionBarcodeDetectorOptions.Builder()
.setBarcodeFormats(
FirebaseVisionBarcode.FORMAT_EAN_13,
FirebaseVisionBarcode.FORMAT_EAN_8,
FirebaseVisionBarcode.FORMAT_UPC_A,
FirebaseVisionBarcode.FORMAT_UPC_E)
.build();
FirebaseVisionBarcodeDetector detector = FirebaseVision.getInstance().
getVisionBarcodeDetector(options);
initCamera();
}
private void initCamera() {
try {
Log.d("debug", "camera initiated...");
int permission = ContextCompat.checkSelfPermission(getApplicationContext(),
android.Manifest.permission.CAMERA);
int granted = PackageManager.PERMISSION_GRANTED;
if(permission == granted) {
mCameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
String[] cameraIdList = mCameraManager.getCameraIdList();
if(cameraIdList.length == 0) {
throw new Exception("No camera found", null);
}
String backFacingCameraID = getBackFacingCameraID(cameraIdList);
if(backFacingCameraID != null) {
mSurfaceList = new ArrayList<>();
mSurfaceViewPreview = findViewById(R.id.barcodeScanner);
mSurfacePreview = mSurfaceViewPreview.getHolder().getSurface();
mSurfaceList.add(mSurfacePreview);
mCameraManager.openCamera(backFacingCameraID, cameraCallback, null);
} else {
//show error message that no backfacing camera is found.
}
} else {
ActivityCompat.requestPermissions(this,
new String[] {android.Manifest.permission.CAMERA}, 0);
}
} catch(Exception e) {
e.printStackTrace();
}
}
private CameraDevice.StateCallback cameraCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
try {
mPreviewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(mSurfacePreview);
mPreviewRequest = mPreviewRequestBuilder.build();
cameraDevice.createCaptureSession(mSurfaceList, stateCallback, null);
} catch(Exception e) {
e.printStackTrace();
}
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int i) {}
};
private CameraCaptureSession.StateCallback stateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
try {
cameraCaptureSession.setRepeatingRequest(mPreviewRequest, captureCallback ,null);
} catch(Exception e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {}
};
private CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, long timestamp, long frameNumber) {
super.onCaptureStarted(session, request, timestamp, frameNumber);
}
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureResult partialResult) {
super.onCaptureProgressed(session, request, partialResult);
}
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Log.d("debug", String.valueOf(result.getPartialResults()));
}
@Override
public void onCaptureFailed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureFailure failure) {
super.onCaptureFailed(session, request, failure);
}
};
private String getBackFacingCameraID(String[] cameraIdList) {
String backFacingCameraID = null;
try {
for(String cameraID:cameraIdList) {
CameraCharacteristics characteristics =
mCameraManager.getCameraCharacteristics(cameraID);
if(characteristics.get(CameraCharacteristics.LENS_FACING) == 1) {
backFacingCameraID = cameraID;
}
}
} catch (Exception e) {
e.printStackTrace();
}
return backFacingCameraID;
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
initCamera();
}
public void backButton(View view) {}
}
It seems that I forgot to add:
mPreviewRequestBuilder.addTarget(mImageReaderSurface);
Also, the camera froze after 5 frames; the fix for that is described in Camera2 ImageReader freezes repeating capture request.
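For reference, here is a minimal sketch of the missing ImageReader wiring (the names and numbers are assumptions: the detector from onCreate() is kept in a field called mDetector, 1280x720 is taken to be a supported YUV size, and ROTATION_90 is a guess at the sensor orientation). The key points are creating the reader before the surface list is built, adding its surface to both the surface list and the capture request, and closing every Image so the stream does not stall:
// Sketch only: create the reader first (e.g. at the top of initCamera()).
mImageReader = ImageReader.newInstance(1280, 720, ImageFormat.YUV_420_888, 2);
mImageReader.setOnImageAvailableListener(reader -> {
    Image image = reader.acquireLatestImage();
    if (image == null) return;
    FirebaseVisionImage visionImage =
            FirebaseVisionImage.fromMediaImage(image, FirebaseVisionImageMetadata.ROTATION_90);
    mDetector.detectInImage(visionImage)
            .addOnSuccessListener(barcodes -> {
                for (FirebaseVisionBarcode barcode : barcodes) {
                    Log.d("debug", "barcode: " + barcode.getRawValue());
                }
            })
            .addOnCompleteListener(task -> image.close()); // release the buffer, or the stream stalls
}, null);

// in initCamera(), next to mSurfaceList.add(mSurfacePreview):
mSurfaceList.add(mImageReader.getSurface());

// in onOpened(), next to mPreviewRequestBuilder.addTarget(mSurfacePreview):
mPreviewRequestBuilder.addTarget(mImageReader.getSurface());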
Related questions:
As the title says, I am trying to preview the two rear cameras on my phone on two separate TextureViews. I can preview the front camera and any one of the rear cameras, but when I try to preview both rear cameras I get an "ERROR_MAX_CAMERAS_IN_USE" from the onError state callback for the last-opened camera. Searching the Internet and this site, I couldn't really find any pointers on what I'm doing wrong.
For completeness here's the code of my main activity java class, in which I setup the cameras and obtain the previews:
public class MainActivity extends AppCompatActivity {
private final TextureView[] textureViews = new TextureView[2];
protected CameraDevice[] cameraDevices = new CameraDevice[2];
protected CameraCaptureSession[] cameraCaptureSessions = new CameraCaptureSession[2];
protected CaptureRequest.Builder[] captureRequestBuilders = new CaptureRequest.Builder[2];
private Size imageDimension;
private static final int REQUEST_CAMERA_PERMISSION = 200;
private final Handler[] mBackgroundHandlers = new Handler[2];
private final HandlerThread[] mBackgroundThreads = new HandlerThread[2];
private final int[] cameraIDs = {0, 2};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
textureViews[0] = findViewById(R.id.left);
textureViews[1] = findViewById(R.id.right);
assert textureViews[0] != null;
assert textureViews[1] != null;
textureViews[0].setSurfaceTextureListener(textureListener0);
textureViews[1].setSurfaceTextureListener(textureListener1);
}
TextureView.SurfaceTextureListener textureListener0 = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
openCamera(0);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) { }
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) { }
};
TextureView.SurfaceTextureListener textureListener1 = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
openCamera(1);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) { }
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) { }
};
private final CameraDevice.StateCallback stateCallback0 = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
if(Debug.LOG) {
Log.e(Debug.TAG, camera.getId()+ " onOpened.");
}
cameraDevices[0] = camera;
createCameraPreview(0);
}
@Override
public void onDisconnected(CameraDevice camera) {
cameraDevices[0].close();
}
@Override
public void onError(CameraDevice camera, int error) {
cameraDevices[0].close();
cameraDevices[0] = null;
}
};
private final CameraDevice.StateCallback stateCallback1 = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
if(Debug.LOG) {
Log.e(Debug.TAG, camera.getId() + " onOpened.");
}
cameraDevices[1] = camera;
createCameraPreview(1);
}
@Override
public void onDisconnected(CameraDevice camera) {
cameraDevices[1].close();
}
@Override
public void onError(CameraDevice camera, int error) {
cameraDevices[1].close();
cameraDevices[1] = null;
}
};
protected void startBackgroundThreads() {
mBackgroundThreads[0] = new HandlerThread("Left camera Background");
mBackgroundThreads[0].start();
mBackgroundHandlers[0] = new Handler(mBackgroundThreads[0].getLooper());
mBackgroundThreads[1] = new HandlerThread("Right camera Background");
mBackgroundThreads[1].start();
mBackgroundHandlers[1] = new Handler(mBackgroundThreads[1].getLooper());
}
protected void stopBackgroundThreads() {
mBackgroundThreads[0].quitSafely();
mBackgroundThreads[1].quitSafely();
try {
mBackgroundThreads[0].join();
mBackgroundThreads[0] = null;
mBackgroundHandlers[0] = null;
mBackgroundThreads[1].join();
mBackgroundThreads[1] = null;
mBackgroundHandlers[1] = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
protected void createCameraPreview(int cameraID) {
try {
SurfaceTexture texture = textureViews[cameraID].getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilders[cameraID] = cameraDevices[cameraID].createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilders[cameraID].addTarget(surface);
cameraDevices[cameraID].createCaptureSession(Collections.singletonList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
if (null == cameraDevices[cameraID]) {
return;
}
cameraCaptureSessions[cameraID] = cameraCaptureSession;
updatePreview(cameraID);
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(MainActivity.this, "Configuration change", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void openCamera(int cameraID) {
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
if(Debug.LOG) {
Log.e(Debug.TAG, "openCamera " + cameraIDs[cameraID]);
}
try {
String cameraId = manager.getCameraIdList()[cameraIDs[cameraID]];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
// Add permission for camera and let user grant the permission
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_CAMERA_PERMISSION);
return;
}
manager.openCamera(cameraId, (cameraID == 0) ? stateCallback0 : stateCallback1, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
if(Debug.LOG) {
Log.e(Debug.TAG, "openCamera exited");
}
}
protected void updatePreview(int cameraID) {
if (null == cameraDevices[cameraID]) {
if(Debug.LOG) {
Log.e(Debug.TAG, "updatePreview error, return");
}
}
captureRequestBuilders[cameraID].set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
try {
cameraCaptureSessions[cameraID].setRepeatingRequest(captureRequestBuilders[cameraID].build(), null, mBackgroundHandlers[cameraID]);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void closeCamera(int cameraID) {
if (null != cameraDevices[cameraID]) {
cameraDevices[cameraID].close();
cameraDevices[cameraID] = null;
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == REQUEST_CAMERA_PERMISSION) {
if (grantResults[0] == PackageManager.PERMISSION_DENIED) {
finish();
}
}
}
@Override
protected void onResume() {
super.onResume();
startBackgroundThreads();
if(textureViews[0].isAvailable()) {
openCamera(0);
} else {
textureViews[0].setSurfaceTextureListener(textureListener0);
}
if(textureViews[1].isAvailable()) {
openCamera(1);
} else {
textureViews[1].setSurfaceTextureListener(textureListener1);
}
}
@Override
protected void onPause() {
closeCamera(0);
closeCamera(1);
stopBackgroundThreads();
super.onPause();
}
I am trying to record video using MediaRecorder and camera2, but the app crashes as soon as mediaRecorder.start() is reached. In onCreate, prepareCamera() is called first and then trigger(). I am a bit new to camera2. Can anyone help me find out why this is happening?
public void prepareCamera() throws CameraAccessException {
manager = (CameraManager) getSystemService(CAMERA_SERVICE);
String[] cameras = manager.getCameraIdList();
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
Log.v("mycontroller","permission not granted");
return;
}
Log.v("mycontroller","permission granted "+cameras[0]);
manager.openCamera(cameras[0], new CameraDevice.StateCallback(){
@Override
public void onOpened(CameraDevice camera) {
Log.v("mycontroller","camera opened");
mCamera2 = camera;
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.MPEG_4_SP);
try {
mediaRecorder.setOutputFile(createFile().getAbsolutePath());
mediaRecorder.prepare();
Log.v("mycontroller","recorder prepared");
List<Surface> list = new ArrayList<>();
list.add(mediaRecorder.getSurface());
final CaptureRequest.Builder captureRequest = mCamera2.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
captureRequest.addTarget(mediaRecorder.getSurface());
mCaptureRequest = captureRequest.build();
mCamera2.createCaptureSession(list, new CameraCaptureSession.StateCallback(){
@Override
public void onConfigured(CameraCaptureSession session) {
mSession = session;
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
mSession = session;
}
}, null);
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void onDisconnected(CameraDevice camera) {}
@Override
public void onError(CameraDevice camera, int error) {}
}, null);
}
public void trigger() {
try {
mediaRecorder.start();
mSession.setRepeatingRequest(mCaptureRequest,
new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
Log.v("mycontroller","camera started capturing");
super.onCaptureStarted(session, request, timestamp, frameNumber);
}
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
Log.v("mycontroller","camera stoped capturing");
super.onCaptureCompleted(session, request, result);
}
}, null);
} catch (CameraAccessException e) {
Log.v("mycontroller",e.getMessage());
e.printStackTrace();
}
}
private void releaseMediaRecorder() throws CameraAccessException {
mSession.stopRepeating();
try {
mediaRecorder.stop();
mediaRecorder.reset();
mediaRecorder.release();
}
catch (Exception e){}
mediaRecorder= null;
mCamera2=null;
}
Take a look at Google's Camera2Video sample to see if you can find any critical differences between your code and the sample:
https://github.com/googlearchive/android-Camera2Video/tree/master/Application/src/main/java/com/example/android/camera2video
You may need to set more MediaRecorder settings like resolution; often the CamcorderProfile class is used to do this easily.
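To make that concrete, here is a sketch of a fuller recorder setup along the lines of the Camera2Video sample, driven by a CamcorderProfile (QUALITY_HIGH is an assumption; pick a profile the device actually reports as supported). It would replace the three setter calls in onOpened():
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mediaRecorder.setOutputFile(createFile().getAbsolutePath()); // your existing helper
mediaRecorder.setVideoEncodingBitRate(profile.videoBitRate);
mediaRecorder.setVideoFrameRate(profile.videoFrameRate);
mediaRecorder.setVideoSize(profile.videoFrameWidth, profile.videoFrameHeight);
mediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mediaRecorder.prepare();
Also note that in the sample, MediaRecorder.start() is only called from inside the capture session's onConfigured callback, after the repeating request has been set, rather than straight after the setup method returns.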
I am writing a body measurement app. I have been able to create the camera and it stores the image. I want to use the captured image in another activity, where I call the method 'bodyMeasurement'.
I really need help achieving this. Further corrections are also welcome. Thanks.
Here is my camera activity:
CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
cameraDevice = camera;
createCameraPreview();
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
cameraDevice.close();
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int i) {
cameraDevice.close();
cameraDevice=null;
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
textureView = (TextureView)findViewById(R.id.textureView);
// From Java 1.4 onward, you can use the 'assert' keyword to check whether an expression is true or false
assert textureView != null;
textureView.setSurfaceTextureListener(textureListener);
btnCapture = (ImageButton)findViewById(R.id.btnCapture);
/* Dexter.withActivity(this)
.withPermissions(Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.CAMERA)
.withListener(new MultiplePermissionsListener() {
@Override
public void onPermissionsChecked(MultiplePermissionsReport report) {
if (!report.areAllPermissionsGranted()) {
Toast.makeText(MainActivity.this, "You need to grant all permission to use this app features", Toast.LENGTH_SHORT).show();
}
}
@Override
public void onPermissionRationaleShouldBeShown(List<PermissionRequest> permissions, PermissionToken token) {
}
})
.check();*/
btnCapture.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
takePicture();
}
});
}
private void takePicture() {
if(cameraDevice == null)
return;
CameraManager manager = (CameraManager)getSystemService(Context.CAMERA_SERVICE);
try{
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = null;
if(characteristics != null)
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
.getOutputSizes(ImageFormat.JPEG);
//Capture image with custom size
int width = 4608;
int height = 3456;
if(jpegSizes != null && jpegSizes.length > 0)
{
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
final ImageReader reader = ImageReader.newInstance(width,height,ImageFormat.JPEG,1);
List<Surface> outputSurface = new ArrayList<>(2);
outputSurface.add(reader.getSurface());
outputSurface.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
//Check orientation base on device
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION,ORIENTATIONS.get(rotation));
file = new File(Environment.getExternalStorageDirectory()+"/"+ ".CnatraSamp"+".jpg");
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader imageReader) {
Image image = null;
try{
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
}
catch (FileNotFoundException e)
{
e.printStackTrace();
}
catch (IOException e)
{
e.printStackTrace();
}
finally {
{
if(image != null)
image.close();
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream outputStream = null;
try{
outputStream = new FileOutputStream(file);
outputStream.write(bytes);
}finally {
if(outputStream != null)
outputStream.close();
}
}
};
reader.setOnImageAvailableListener(readerListener,mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(MainActivity.this, "Saved "+file, Toast.LENGTH_SHORT).show();
finish();
Intent i = new Intent(MainActivity.this,BlankActivity.class);
startActivity(i);
finish();
}
};
cameraDevice.createCaptureSession(outputSurface, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
try{
cameraCaptureSession.capture(captureBuilder.build(),captureListener,mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
}
},mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void createCameraPreview() {
try{
SurfaceTexture texture = textureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(imageDimension.getWidth(),imageDimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
if(cameraDevice == null)
return;
cameraCaptureSessions = cameraCaptureSession;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(MainActivity.this, "Changed", Toast.LENGTH_SHORT).show();
}
},null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void updatePreview() {
if(cameraDevice == null)
Toast.makeText(this, "Error", Toast.LENGTH_SHORT).show();
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE,CaptureRequest.CONTROL_MODE_AUTO);
try{
cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(),null,mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void openCamera() {
CameraManager manager = (CameraManager)getSystemService(Context.CAMERA_SERVICE);
try{
cameraId = manager.getCameraIdList()[1];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
imageDimension = map.getOutputSizes(SurfaceTexture.class)[1];
//Check realtime permission if run higher API 23
if(ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED)
{
ActivityCompat.requestPermissions(this,new String[]{
Manifest.permission.CAMERA,
Manifest.permission.WRITE_EXTERNAL_STORAGE
},REQUEST_CAMERA_PERMISSION);
return;
}
manager.openCamera(cameraId,stateCallback,null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
openCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i1) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
if(requestCode == REQUEST_CAMERA_PERMISSION)
{
if(grantResults[0] != PackageManager.PERMISSION_GRANTED)
{
Toast.makeText(this, "You can't use camera without permission", Toast.LENGTH_SHORT).show();
finish();
}
}
}
@Override
protected void onResume() {
super.onResume();
startBackgroundThread();
if(textureView.isAvailable())
openCamera();
else
textureView.setSurfaceTextureListener(textureListener);
}
@Override
protected void onPause() {
stopBackgroundThread();
super.onPause();
}
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try{
mBackgroundThread.join();
mBackgroundThread= null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
Here is the activity where I need the image:
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_blank);
height = findViewById(R.id.height);
cancel = findViewById(R.id.cancel);
btnctn = findViewById(R.id.continuebtn);
cmIn = findViewById(R.id.cmIn);
height.setInputType(InputType.TYPE_CLASS_NUMBER |
InputType.TYPE_NUMBER_FLAG_DECIMAL |
InputType.TYPE_NUMBER_FLAG_SIGNED);
btnctn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (height.getText().toString().trim().isEmpty()){
Toast.makeText(BlankActivity.this,"Please input your height..",Toast.LENGTH_LONG).show();
}else {
final ProgressDialog progressDialog = new ProgressDialog(BlankActivity.this);
progressDialog.setMessage("Getting Your Measurement");
progressDialog.setTitle("Please wait!");
progressDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
progressDialog.show(); // Display Progress Dialog
progressDialog.setCancelable(false);
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
Intent i = new Intent(BlankActivity.this, SettingsActivity.class);
startActivity(i);
progressDialog.dismiss();
}
},5000);
}
//place measurement object
bodyMeasurement();
}
});
cancel.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
BlankActivity.this.finish();
Intent i = new Intent(BlankActivity.this, HomeActivity.class);
startActivity(i);
finish();
}
});
cmIn.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if(isChecked){
num = Double.parseDouble(String.valueOf(height.getText()));
in = 0.3937 * num;
String r = String.valueOf(in);
height.setText(r);
}else {
num = Double.parseDouble(String.valueOf(height.getText()));
cm = num / 0.3937;
String r = String.valueOf(cm);
height.setText(r);
}
}
});
}
private void bodyMeasurement() {
}
}
As mentioned in the comment, you can get the absolute path of the saved image and pass it through an intent to the activity that needs it.
To achieve this, you might want to convert your image file to a Bitmap first, which will give you the actual path of the image (sometimes the .getAbsolutePath() method returns a wrong file path, so it is safer to convert the image to a Bitmap first).
This can be done like so:
String filePath = file.getPath();
Bitmap imgBitmap = BitmapFactory.decodeFile(filePath);
Then, to obtain the real path of the image you saved, you need to:
1) Obtain the Uri of the created Bitmap:
public Uri getUri(Context context, Bitmap imgBitmap) {
ByteArrayOutputStream bOutputStream = new ByteArrayOutputStream();
String path = MediaStore.Images.Media.insertImage(context.getContentResolver(),
imgBitmap, "Title", null);
return Uri.parse(path);
}
2) Fetch the real path from Uri:
public String getRealPath(Uri uri) {
String [] proj={MediaStore.Images.Media.DATA};
Cursor cursor = getContentResolver().query(uri, proj, null, null, null);
int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
cursor.moveToFirst();
String path = cursor.getString(column_index);
cursor.close();
return path;
}
In your first activity you can pass the path of the image through an intent to the other activity, then fetch it there and display or process it.
Intent intent = new Intent(context,OtherActivity.class);
intent.putExtra("PATH",path);
startActivity(intent);
Receive the data you passed:
Intent intent = getIntent();
String path = intent.getStringExtra("PATH");
//convert the path to image/bitmap and display the image
You can also pass the entire Bitmap to the activity that needs it, but I strongly recommend that you don't, because it will use a lot of memory.
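On the receiving side, a minimal sketch of how BlankActivity could pick the path back up and decode it (the "PATH" key just has to match whatever you used in putExtra; keeping the resulting bitmap in a field for bodyMeasurement() is an assumption about how you want to use it):
// In BlankActivity.onCreate(), after setContentView():
String path = getIntent().getStringExtra("PATH");
if (path != null) {
    Bitmap capturedImage = BitmapFactory.decodeFile(path);
    // keep capturedImage in a field so bodyMeasurement() can work with it
}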
I'm trying to configure camera capture without a preview, but after taking the second photo the app crashes with this exception:
2018-12-27 14:36:20.392 12389-12977/com.example.android.braillefeeder E/RequestThread-0: Received device exception during capture call:
java.io.IOException: setPreviewTexture failed
at android.hardware.Camera.setPreviewTexture(Native Method)
at android.hardware.camera2.legacy.RequestThreadManager.doJpegCapturePrepare(RequestThreadManager.java:298)
at android.hardware.camera2.legacy.RequestThreadManager.-wrap1(Unknown Source:0)
at android.hardware.camera2.legacy.RequestThreadManager$5.handleMessage(RequestThreadManager.java:830)
at android.os.Handler.dispatchMessage(Handler.java:101)
at android.os.Looper.loop(Looper.java:164)
at android.os.HandlerThread.run(HandlerThread.java:65)
I'm targeting SDK version 28. I thought the problem might have something to do with threading, so I tried initializing the thread on the main thread, but with no success.
Here is the code:
public class CameraService {
// Size of taken photo
private static final int IMAGE_WIDTH = 1280;
private static final int IMAGE_HEIGHT = 960;
private CameraDevice mCameraDevice;
private CameraCaptureSession mCameraCaptureSession;
private HandlerThread backgroundThread;
private Handler backgroundHandler;
private ImageReader mImageReader;
private CameraService() {
}
private static class InstanceHolder {
private static CameraService sCameraService = new CameraService();
}
public static CameraService getInstance() {
return InstanceHolder.sCameraService;
}
public void initializeCamera(Context context,
ImageReader.OnImageAvailableListener onImageAvailableListener) {
CameraManager cameraManager = (CameraManager) context.getSystemService(CAMERA_SERVICE);
String[] camIds = {};
try {
camIds = cameraManager.getCameraIdList();
} catch (CameraAccessException e) {
e.printStackTrace();
}
if( camIds.length < 1) {
Log.e("CameraService", "Camera not available.");
return;
}
startBackgroundThread();
mImageReader = ImageReader.newInstance(IMAGE_WIDTH, IMAGE_HEIGHT, ImageFormat.JPEG, 2);
mImageReader.setOnImageAvailableListener(onImageAvailableListener, backgroundHandler);
try {
cameraManager.openCamera(camIds[0], mStateCallback, backgroundHandler);
} catch (SecurityException e) {
e.printStackTrace();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void startBackgroundThread() {
backgroundThread = new HandlerThread("CameraBackground");
backgroundThread.start();
backgroundHandler = new Handler(backgroundThread.getLooper());
}
private void stopBackgroundThread() {
backgroundThread.quitSafely();
try {
backgroundThread.join();
backgroundThread = null;
backgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int i) {
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onClosed(@NonNull CameraDevice camera) {
mCameraDevice = null;
}
};
public void takePicture() {
Log.d("CameraService", "takePicture()");
if( mCameraDevice == null) {
Log.d("CameraService", "Cannot take picture. Camera device is null.");
return;
}
try {
mCameraDevice.createCaptureSession(Collections.singletonList(mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
if( mCameraDevice == null) {
Log.e("mStateCallback", " mStateCallbackCaptureSession configured");
return;
}
Log.d("CameraService", "imageCapture()");
mCameraCaptureSession = cameraCaptureSession;
imageCapture();
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Log.e("mStateCallback", "Configure failed");
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void imageCapture() {
Log.d("CameraService", "imageCapture()");
try {
final CaptureRequest.Builder builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
builder.addTarget(mImageReader.getSurface());
builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
mCameraCaptureSession.stopRepeating();
mCameraCaptureSession.capture(builder.build(), mCaptureCallback, null);
} catch (CameraAccessException e) {
Log.e("imagecapture()", "KOKOTKO");
e.printStackTrace();
}
}
private final CameraCaptureSession.CaptureCallback mCaptureCallback =
new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, long timestamp, long frameNumber) {
super.onCaptureStarted(session, request, timestamp, frameNumber);
}
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureResult partialResult) {
super.onCaptureProgressed(session, request, partialResult);
}
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
if( session != null) {
session.close();
}
}
};
public void shutdown() {
Log.d("CameraService", "shutdown()");
if( mCameraDevice != null) {
mCameraDevice.close();
}
if( mCameraCaptureSession != null) {
mCameraCaptureSession.close();
}
stopBackgroundThread();
}
}
Thanks
I had the same issue with the camera2 API when capturing an image for the second time. The problem is that you are not closing the image (image.close()) in the ImageReader listener after getting the bytes.
I hope this helps.
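To illustrate, a sketch of what the listener passed into initializeCamera() could look like; the important part is the finally block that closes the Image, otherwise the reader (maxImages = 2 here) runs out of buffers and the second capture fails:
ImageReader.OnImageAvailableListener listener = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        Image image = reader.acquireLatestImage();
        if (image == null) {
            return;
        }
        try {
            ByteBuffer buffer = image.getPlanes()[0].getBuffer();
            byte[] bytes = new byte[buffer.remaining()];
            buffer.get(bytes);
            // hand the bytes off for saving or further processing here
        } finally {
            image.close(); // this is the missing call
        }
    }
};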
I'm aware that this question has been asked before, but all of the answers I have looked at have failed to give me the answer (most probably because I can't understand some of them, but also because there seem to be a lot of different solutions and I can't see the wood for the trees).
I'm hoping that by providing my code, someone will be able to point me in the right direction.
Here is my class used to take the picture:
public class CameraPhotoActivity extends AppCompatActivity {
private Button btnCapture;
private TextureView textureView;
// to check the state orientation of the output image
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private String cameraId;
private CameraDevice cameraDevice;
private CameraCaptureSession cameraCaptureSessions;
private CaptureRequest.Builder captureRequestBuilder;
private Size imageDimension;
private ImageReader imageReader;
// save to file
private File file;
private static final int REQUEST_CAMERA_PERMISSION = 200;
private boolean mFlashSupported;
private Handler mBackgroundHandler;
private HandlerThread mBackgroundThread;
CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
cameraDevice = camera;
createCameraPreview();
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
cameraDevice.close();
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int i) {
cameraDevice.close();
cameraDevice = null;
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera_photo);
textureView = (TextureView) findViewById(R.id.textureView);
assert textureView != null;
textureView.setSurfaceTextureListener(textureListener);
btnCapture = (Button) findViewById(R.id.btnCapture);
btnCapture.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
takePicture();
}
});
}
private void takePicture() {
if (cameraDevice == null)
return;
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = null;
if (characteristics != null)
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
.getOutputSizes(ImageFormat.JPEG);
// capture the image with custom size
int width = 640;
int height = 480;
if (jpegSizes != null && jpegSizes.length > 0) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
final ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurface = new ArrayList<>(2);
outputSurface.add(reader.getSurface());
outputSurface.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
// check the orientation on the device
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
file = new File(Environment.getExternalStorageDirectory() + "/" + UUID.randomUUID().toString()
+ ".jpg");
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader imageReader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
{
if (image != null)
image.close();
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream outputStream = null;
try {
outputStream = new FileOutputStream(file);
outputStream.write(bytes);
} finally {
if (outputStream != null)
outputStream.close();
}
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(CameraPhotoActivity.this, "Saved " + file, Toast.LENGTH_SHORT).show();
createCameraPreview();
}
};
cameraDevice.createCaptureSession(outputSurface, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
try {
cameraCaptureSession.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void createCameraPreview() {
try {
SurfaceTexture texture = textureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
if (cameraDevice == null)
return;
cameraCaptureSessions = cameraCaptureSession;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(CameraPhotoActivity.this, "Changed", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void updatePreview() {
if (cameraDevice == null)
Toast.makeText(this, "Error", Toast.LENGTH_SHORT).show();
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
try {
cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void openCamera() {
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
cameraId = manager.getCameraIdList()[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
// check realtime permission if run higher than API 23
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, new String[]{
Manifest.permission.CAMERA,
Manifest.permission.WRITE_EXTERNAL_STORAGE
}, REQUEST_CAMERA_PERMISSION);
return;
}
manager.openCamera(cameraId, stateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
openCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i1) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
if (requestCode == REQUEST_CAMERA_PERMISSION) {
if (grantResults[0] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(this, "You can't use the camera without permission", Toast.LENGTH_SHORT).show();
finish();
}
}
}
@Override
protected void onResume() {
super.onResume();
startBackgroundThread();
if (textureView.isAvailable())
openCamera();
else
textureView.setSurfaceTextureListener(textureListener);
}
@Override
protected void onPause() {
stopBackgroundThread();
super.onPause();
}
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
}
I have tried adding the following (so that the saved image gets picked up by the media scanner), but, assuming it is correct, I can't seem to find the right place to put it:
MediaScannerConnection.scanFile(this, new String[]{file.toString()}, null,
new MediaScannerConnection.OnScanCompletedListener() {
public void onScanCompleted(String path, Uri uri) {
Log.i("External Storage", "Scanned" + path + ":");
Log.i("External Storage", "-> uri=" + uri);
}
});
Thanks for any help. Much appreciated.
This means "context". You need to place the code within the activity class, once the image has been fetched from the camera. Also call connect before calling scanFile. You can try below code.
MediaScannerConnection.scanFile(CameraPhotoActivity.this, new String[]{file.toString()}, null,
new MediaScannerConnection.OnScanCompletedListener() {
public void onScanCompleted(String path, Uri uri) {
Log.i("External Storage", "Scanned" + path + ":");
Log.i("External Storage", "-> uri=" + uri);
}
});
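To make the placement concrete, one option (an assumption on my part, not the only valid spot) is to call it at the end of the save(byte[]) method inside your ImageReader listener, once the stream has been closed and the file is definitely on disk:
private void save(byte[] bytes) throws IOException {
    OutputStream outputStream = null;
    try {
        outputStream = new FileOutputStream(file);
        outputStream.write(bytes);
    } finally {
        if (outputStream != null)
            outputStream.close();
    }
    MediaScannerConnection.scanFile(CameraPhotoActivity.this,
            new String[]{file.toString()}, null,
            new MediaScannerConnection.OnScanCompletedListener() {
                public void onScanCompleted(String path, Uri uri) {
                    Log.i("External Storage", "Scanned " + path + " -> uri=" + uri);
                }
            });
}
Calling it from onCaptureCompleted, right after the "Saved" toast, would work just as well.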