I have implemented a custom camera that works well on all other smartphones, but when I take a photo on Nexus smartphones the stored image is fine while the preview shown to the user on the SurfaceView is very dark; only the preview is affected, not the photo that is stored.
My classes are below
Preview.java
package com.custom.customcamera;
import java.io.IOException;
import java.util.List;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.Size;
import android.os.Handler;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
class Preview extends ViewGroup implements SurfaceHolder.Callback {
private final String TAG = "Preview";
SurfaceView mSurfaceView;
SurfaceHolder mHolder;
int heightmax ;
int widthmax ;
Size mPreviewSize;
List<Size> mSupportedPreviewSizes;
Camera mCamera;
@SuppressWarnings("deprecation")
Preview(Context context, SurfaceView sv) {
super(context);
mSurfaceView = sv;
// addView(mSurfaceView);
mHolder = mSurfaceView.getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setCamera(Camera camera) {
mCamera = camera;
if (mCamera != null) {
mSupportedPreviewSizes = mCamera.getParameters().getSupportedPictureSizes();
requestLayout();
// get Camera parameters
Camera.Parameters params = mCamera.getParameters();
List<String> focusModes = params.getSupportedFocusModes();
if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
// set the focus mode
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
// set Camera parameters
mCamera.setParameters(params);
}
params.setWhiteBalance(Parameters.WHITE_BALANCE_AUTO);
params.setSceneMode(Parameters.SCENE_MODE_AUTO);
//params.setPreviewFormat(256);
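// Note: reading the current exposure compensation and immediately writing the same value back (below) is effectively a no-op.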
int index = params.getExposureCompensation ();
params.setExposureCompensation(index);
mCamera.setParameters(params);
}
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
// We purposely disregard child measurements because we act as a
// wrapper to a SurfaceView that centers the camera preview instead
// of stretching it.
final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if (mSupportedPreviewSizes != null) {
mPreviewSize=maxSize();
}
}
public Size maxSize(){
// heightmax =0;
// widthmax =0;
Size sizeMax=mSupportedPreviewSizes.get(0);
//long totalsize = heightmax*widthmax;
//long maxsize=mSupportedPreviewSizes.get(0).height*mSupportedPreviewSizes.get(0).width;
for(Size size:mSupportedPreviewSizes){
if(size.height*size.width>sizeMax.width*sizeMax.height){
sizeMax = size;
}
}
return sizeMax;
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
if (changed && getChildCount() > 0) {
final View child = getChildAt(0);
final int width = r - l;
final int height = b - t;
int previewWidth = width;
int previewHeight = height;
if (mPreviewSize != null) {
previewWidth = mPreviewSize.width;
previewHeight = mPreviewSize.height;
}
// Center the child SurfaceView within the parent.
if (width * previewHeight > height * previewWidth) {
final int scaledChildWidth = previewWidth * height / previewHeight;
child.layout((width - scaledChildWidth) / 2, 0,
(width + scaledChildWidth) / 2, height);
} else {
final int scaledChildHeight = previewHeight * width / previewWidth;
child.layout(0, (height - scaledChildHeight) / 2,
width, (height + scaledChildHeight) / 2);
}
}
}
public void surfaceCreated(final SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
try {
if (mCamera != null) {
mCamera.setPreviewDisplay(holder);
}
} catch (Exception exception) {
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
if (mCamera != null) {
mCamera.release();
//mCamera.stopPreview();
mCamera = null;
}
}
Camera.AutoFocusCallback mnAutoFocusCallback = new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
}
};
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if(mCamera != null) {
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPictureSize(mPreviewSize.width, mPreviewSize.height);
requestLayout();
mCamera.setParameters(parameters);
mCamera.startPreview();
}
}
}
CameraActivity.java
package com.custom.customcamera;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.util.Calendar;
import java.util.Date;
import java.util.UUID;
import android.util.Base64;
import com.kut.kutcamera.R;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.ErrorCallback;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.ShutterCallback;
import android.media.ExifInterface;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.app.Activity;
import android.content.Intent;
import android.util.Log;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.FrameLayout;
import android.widget.ImageView;
public class CameraActivity extends Activity {
Activity context;
Preview preview;
Camera camera;
ImageView fotoButton;
//ImageView foto;
ImageView CancelButton;
ImageView ConfirmButton;
//SurfaceView surfaceView;
String path = "";
static CameraListener listener;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
context=this;
Intent intent = getIntent();
String tempPath = intent.getStringExtra("Path");
this.path = tempPath;
fotoButton = (ImageView) findViewById(R.id.imageView_foto);
ConfirmButton = (ImageView) findViewById(R.id.imageView_confirm);
CancelButton = (ImageView) findViewById(R.id.imageView_cancel);
//foto = (ImageView) findViewById(R.id.imageView_photoTaken);
//surfaceView=(SurfaceView)findViewById(R.id.CustomCameraFragment);
ConfirmButton.setVisibility(View.INVISIBLE);
CancelButton.setVisibility(View.INVISIBLE);
//foto.setVisibility(View.INVISIBLE);
preview = new Preview(this,
(SurfaceView) findViewById(R.id.CustomCameraFragment));
FrameLayout frame = (FrameLayout) findViewById(R.id.preview);
frame.addView(preview);
preview.setKeepScreenOn(true);
fotoButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
try {
fotoButton.setImageResource(R.drawable.camera_focused);
takeFocusedPicture();
} catch (Exception e) {
}
fotoButton.setClickable(false);
}
});
}
public void initializeListener(CameraListener listener)
{
this.listener = listener;
}
private int cameraId = 0;
@Override
protected void onResume() {
super.onResume();
// TODO Auto-generated method stub
try
{
if(camera==null){
cameraId = findFrontFacingCamera();
camera = Camera.open(cameraId);
Camera.Parameters params = camera.getParameters();
params.setWhiteBalance(Parameters.WHITE_BALANCE_AUTO);
params.setSceneMode(Parameters.SCENE_MODE_AUTO);
//params.setPreviewFormat(256);
int index = params.getExposureCompensation ();
params.setExposureCompensation(index);
camera.setParameters(params);
camera.startPreview();
camera.setErrorCallback(new ErrorCallback() {
public void onError(int error, Camera mcamera) {
camera.release();
camera = Camera.open();
Log.d("Camera died", "error camera");
}
});
}
if (camera != null) {
if (Build.VERSION.SDK_INT >= 14)
setCameraDisplayOrientation(context, CameraInfo.CAMERA_FACING_FRONT, camera);
preview.setCamera(camera);
}
}
catch(Exception ex)
{
String s = ex.toString();
}
}
private int findFrontFacingCamera() {
int cameraId = -1;
// Search for the front facing camera
int numberOfCameras = Camera.getNumberOfCameras();
for (int i = 0; i < numberOfCameras; i++) {
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(i, info);
if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
cameraId = i;
break;
}
}
return cameraId;
}
private void setCameraDisplayOrientation(Activity activity, int cameraId,
android.hardware.Camera camera) {
android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(cameraId, info);
int rotation = activity.getWindowManager().getDefaultDisplay()
.getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
camera.setDisplayOrientation(result);
}
Camera.AutoFocusCallback mAutoFocusCallback = new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, final Camera camera) {
try{
Camera.Parameters params = camera.getParameters();
//params.setWhiteBalance(Parameters.WHITE_BALANCE_AUTO);
//params.setSceneMode(Parameters.SCENE_MODE_AUTO);
//params.setPreviewFormat(256);
//int index = params.getExposureCompensation ();
//params.setExposureCompensation(index);
//camera.setParameters(params);
//camera.autoFocus(null);
camera.takePicture(mShutterCallback, null, jpegCallback);
}catch(Exception e){
String err = e.getMessage();
}
}
};
Camera.ShutterCallback mShutterCallback = new ShutterCallback() {
@Override
public void onShutter() {
// TODO Auto-generated method stub
}
};
public void takeFocusedPicture() {
camera.autoFocus(mAutoFocusCallback);
}
PictureCallback rawCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
}
};
String ImagePath = "";
PictureCallback jpegCallback = new PictureCallback() {
public void onPictureTaken(final byte[] data, final Camera cameraSec) {
camera.stopPreview();
//Bitmap image = BitmapFactory.decodeByteArray(data, 0, data.length);
fotoButton.setVisibility(View.INVISIBLE);
ConfirmButton.setVisibility(View.VISIBLE);
CancelButton.setVisibility(View.VISIBLE);
//surfaceView.setVisibility(SurfaceView.GONE);
//foto.setVisibility(View.VISIBLE);
//foto.setVisibility(ImageView.VISIBLE);
//foto.setImageBitmap(image);
ConfirmButton.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View paramView, MotionEvent paramMotionEvent) {
try
{
ConfirmButton.setImageResource(R.drawable.confirmfocused);
FileOutputStream outStream = null;
Calendar c = Calendar.getInstance();
File videoDirectory = new File(path);
if (!videoDirectory.exists()) {
videoDirectory.mkdirs();
}
ImagePath = path + "/" + UUID.randomUUID().toString() + "_" +new Date().getTime() + ".jpg";
outStream = new FileOutputStream(ImagePath);
outStream.write(data);
outStream.close();
Bitmap realImage;
ExifInterface exif = new ExifInterface(ImagePath);
int rotation = (int)exifOrientationToDegrees(
exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL));
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 2;
realImage = BitmapFactory.decodeFile(ImagePath, options);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
realImage.compress(Bitmap.CompressFormat.JPEG, 100, baos); //bm is the bitmap object
byte[] byteArrayImage = baos.toByteArray();
String encodedImage = Base64.encodeToString(byteArrayImage, Base64.DEFAULT);
if(listener != null)
listener.PictureTaken(encodedImage,ImagePath,ImagePath.substring(ImagePath.lastIndexOf("/")+1));
Intent LaunchIntent = getPackageManager().getLaunchIntentForPackage(getBaseContext().getPackageName());
LaunchIntent.setFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT);
startActivity(LaunchIntent);
finish();
}
catch(Exception ex){}
return false;
}
});
CancelButton.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View paramView, MotionEvent paramMotionEvent) {
//surfaceView.setVisibility(SurfaceView.VISIBLE);
CancelButton.setImageResource(R.drawable.cancelfocused);
ConfirmButton.setVisibility(View.INVISIBLE);
CancelButton.setVisibility(View.INVISIBLE);
//foto.setVisibility(View.INVISIBLE);
CancelButton.setImageResource(R.drawable.cancel);
ConfirmButton.setImageResource(R.drawable.confirm);
fotoButton.setVisibility(View.VISIBLE);
fotoButton.setClickable(true);
fotoButton.setImageResource(R.drawable.camera);
cameraSec.startPreview();
return false;
}
});
}
};
private float exifOrientationToDegrees(int exifOrientation) {
if (exifOrientation == ExifInterface.ORIENTATION_ROTATE_90) {
return 90;
} else if (exifOrientation == ExifInterface.ORIENTATION_ROTATE_180) {
return 180;
} else if (exifOrientation == ExifInterface.ORIENTATION_ROTATE_270) {
return 270;
}
return 0;
}
public static Bitmap rotate(Bitmap source, float angle) {
Matrix matrix = new Matrix();
matrix.postRotate(angle);
return Bitmap.createBitmap(source, 0, 0, source.getWidth(),
source.getHeight(), matrix, false);
}
}
I have found a workaround for this issue: I make the image brighter and then show it in an ImageView. The code I used is given below:
foto.setColorFilter(brightIt(100)); // foto is my ImageView
// and below is the brightIt function
public static ColorMatrixColorFilter brightIt(int fb) {
ColorMatrix cmB = new ColorMatrix();
cmB.set(new float[] {
1, 0, 0, 0, fb,
0, 1, 0, 0, fb,
0, 0, 1, 0, fb,
0, 0, 0, 1, 0 });
ColorMatrix colorMatrix = new ColorMatrix();
colorMatrix.set(cmB);
//Canvas c = new Canvas(b2);
//Paint paint = new Paint();
ColorMatrixColorFilter f = new ColorMatrixColorFilter(colorMatrix);
//paint.setColorFilter(f);
return f;
}
Just try adding a one-second delay before taking the picture:
new Handler().postDelayed(new Runnable()
{
@Override
public void run(){
camera.takePicture(mShutterCallback, null, jpegCallback);
}
},1000);
I am trying to play RTSP video on a custom TextureView. The video plays the first time, but when I go to a second activity where the same video is played (not re-instantiated, same session, just a different TextureView) it plays there, and when I come back to the first activity, where I set the surface again in onResume, the TextureView still shows the frame where it left off the first time. Interestingly, the video is actually still playing, it is just not visible in the TextureView, and if I go to the second screen again it shows up there. I have tried both releasing and not releasing the SurfaceTexture, and also releasing the Surface I keep a reference to in the first activity for later use, but none of those seem to work. What could be the possible reason?
In onResume I check whether the app is coming back from the app drawer or from the full-screen activity. When the app goes to the background via the home button and comes back from the app drawer, the video plays; only when coming back from full screen does it not work.
StreamingActivity.java
package com.wiznsystems.android.activities;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.graphics.Point;
import android.graphics.SurfaceTexture;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.Fragment;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.TextureView;
import android.view.View;
import android.view.Display;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.ProgressBar;
import android.widget.Toast;
import com.myeglu.android.R;
import com.myeglu.zoomview.ZoomTextureView;
import com.wiznsystems.android.App;
import com.wiznsystems.android.data.objects.FFMPEG;
import com.wiznsystems.android.utils.Constants;
import com.wiznsystems.android.utils.Events;
import com.wiznsystems.android.utils.FFMPEGPlayer;
import java.io.File;
import de.greenrobot.event.EventBus;
import hugo.weaving.DebugLog;
import timber.log.Timber;
/**
* An example full-screen activity that shows and hides the system UI (i.e.
* status bar and navigation/system bar) with user interaction.
*/
@SuppressWarnings("JniMissingFunction")
public class StreamingActivity extends Fragment implements TextureView.SurfaceTextureListener{
private static boolean loadedLibraries;
private boolean comingFromAppDrawer;
private boolean isComingFromFullScreen;
boolean anotherVideo=false;
boolean shouldTextureUpdate=false;
Surface surface;
public StreamingActivity(){
}
public static ZoomTextureView surfaceView;
private ProgressBar progressBar;
public static boolean isPlaying;
private int isInitialized;
public static int isFullScreenDisplayed=0;
private String url;
public boolean isFirstTime;
private FFMPEG ffmpeg;
private FrameLayout frameLayout;
private final String TAG=StreamingActivity.class.getSimpleName();
int w=0,h=0;
private boolean isFirstTimeForFullscreen=true;
private Button fullScreenButton;
FFMPEGPlayer ffmpegPlayer;
String buttonText="";
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
EventBus.getDefault().register(this);
url=getArguments().getString("url");
Log.d("ANURAN",url);
//getActivity().getWindow().addFlags(WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED);
ffmpegPlayer=App.getFFMPEG();
}
@Nullable
@Override
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
View view=inflater.inflate(R.layout.activity_streaming,null,false);
surfaceView = (ZoomTextureView) view.findViewById(R.id.textureView);
frameLayout=(FrameLayout)view.findViewById(R.id.streaming_framelayout);
progressBar = ((ProgressBar)view.findViewById(R.id.progressBar));
fullScreenButton=(Button)view.findViewById(R.id.fullScreenButton);
fullScreenButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
isFullScreenDisplayed = 1;
isComingFromFullScreen = true;
shouldTextureUpdate=true;
if (isFirstTimeForFullscreen) {
//ffmpegPlayer.libSetSurface(null);
isFirstTimeForFullscreen = false;
}
//surfaceView.getSurfaceTexture().release();
//surface.release();
progressBar.setVisibility(View.INVISIBLE);
goFullScreen();
} catch (Exception throwable) {
throwable.printStackTrace();
}
}
});
progressBar.setVisibility(View.VISIBLE);
Log.d("ANURAN","onCreateView called");
surfaceView.setSurfaceTextureListener(this);
return view;
}
@Override
public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
//Log.d(TAG,url);
}
@DebugLog
private void postInit() {
Events.PlayButtonBGChanger playButtonBGChanger=new Events.PlayButtonBGChanger();
if (isInitialized==0) {
playButtonBGChanger.setShouldChange(true);
initPlay();
progressBar.setVisibility(View.GONE);
} else if(isInitialized==-999){
playButtonBGChanger.setShouldChange(false);
progressBar.setVisibility(View.INVISIBLE);
Snackbar.make(frameLayout,"Please make sure you have stopped other playing videos before playing this one.",Snackbar.LENGTH_LONG).show();
}
else {
playButtonBGChanger.setShouldChange(false);
Snackbar.make(frameLayout,"Something went wrong while live streaming.Please try again later.",Snackbar.LENGTH_LONG).show();
}
EventBus.getDefault().post(playButtonBGChanger);
}
private void initPlay() {
try {
int[] res = ffmpegPlayer.libGetVideoRes();
Log.d("ANURAN", "res width " + res[0] + ": height " + res[1]);
if (res[0] <= 0) {
res[0] = 480;
}
if (res[1] <= 0) {
res[1] = 320;
}
int[] screenRes = getScreenRes();
int width, height;
float widthScaledRatio = screenRes[0] * 1.0f / res[0];
float heightScaledRatio = screenRes[1] * 1.0f / res[1];
if (widthScaledRatio > heightScaledRatio) {
//use heightScaledRatio
width = (int) (res[0] * heightScaledRatio);
height = screenRes[1];
} else {
//use widthScaledRatio
width = screenRes[0];
height = (int) (res[1] * widthScaledRatio);
}
Log.d(TAG, "width " + width + ",height:" + height);
w=width;
h=height;
updateSurfaceView(width, height);
try{
ffmpegPlayer.libSetup(width,height);
if(this.surface == null){
Log.d("ANURAN","surface is null");
}
if(anotherVideo)
ffmpegPlayer.libSetSurface(null);
ffmpegPlayer.libSetSurface(surface);
Log.d("ANURAN","libsetsurface set initPlay()");
}catch (Exception throwable){
Toast.makeText(getActivity(),"Something went wrong while live streaming.Try again",Toast.LENGTH_SHORT).show();
}
playMedia();
}catch (Exception throwable){
throwable.printStackTrace();
}
}
public FFMPEGPlayer getFFMPEGPlayer(){
return this.ffmpegPlayer;
}
private void playMedia() {
if(progressBar.getVisibility()==View.VISIBLE){
progressBar.setVisibility(View.GONE);
}
try{
ffmpegPlayer.libPlay();
}catch (Exception throwable){
Toast.makeText(getActivity(),"Something went wrong while live streaming.Try again",Toast.LENGTH_SHORT).show();
}
isPlaying = true;
CamerasActivity.isPlaying=true;
}
@DebugLog
private void updateSurfaceView(int pWidth, int pHeight) {
//update surfaceview dimension, this will cause the native window to change
Log.d("ANURAN UPDATE SURFACE", "width " + pWidth + ",height:" + pHeight);
FrameLayout.LayoutParams params = (FrameLayout.LayoutParams) surfaceView.getLayoutParams();
params.width = pWidth;
params.height = pHeight;
surfaceView.setLayoutParams(params);
surfaceView.requestLayout();
}
@DebugLog
@SuppressLint("NewApi")
private int[] getScreenRes() {
int[] res = new int[2];
Display display = getActivity().getWindowManager().getDefaultDisplay();
Point size = new Point();
display.getSize(size);
res[0] = size.x;
res[1] = size.y;
return res;
}
public void stopPlaying() {
isPlaying = false;
try{
ffmpegPlayer.libStop();
// ffmpegPlayer.libSetSurface(null);
// surfaceView.getSurfaceTexture().release();
// surfaceView.getSurfaceTexture().detachFromGLContext();
}catch (Exception throwable){
}
}
@Override
public void onStop() {
// Toast.makeText(getActivity(),"onStop called",Toast.LENGTH_SHORT).show();
// stopPlaying();
comingFromAppDrawer=true;
// if(surfaceView.getSurfaceTexture() !=null){
// surfaceView.getSurfaceTexture().release();
//
// }
// if(surface !=null){
// surface.release();
// surface=null;
// }
// if(isFullScreenDisplayed==0){
// stopPlaying();
// }
Log.d("ANURAN onStop",surface.isValid()+"");
super.onStop();
}
@Override
public void onPause() {
// Toast.makeText(getActivity(),"onStop called",Toast.LENGTH_SHORT).show();
// stopPlaying();
isComingFromFullScreen=true;
super.onPause();
}
@Override
public void onResume() {
super.onResume();
if(isComingFromFullScreen){
progressBar.setVisibility(View.INVISIBLE);
if(surface !=null){
ffmpegPlayer.libSetSurface(null);
Log.d("ANURAN onResume","value of surface "+surface.isValid());
ffmpegPlayer.libSetSurface(surface);
}
}
else if(comingFromAppDrawer){
//stopPlaying();
progressBar.setVisibility(View.INVISIBLE);
if(surface !=null){
ffmpegPlayer.libSetSurface(null);
//ffmpegPlayer.libSetup(w,h);
ffmpegPlayer.libSetSurface(surface);
}
}
}
@Override
public void onDestroy() {
super.onDestroy();
stopPlaying();
}
@Override
public void onStart() {
super.onStart();
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
if(this.surface !=null) {
this.surface.release();
this.surface=null;
}
this.surface=new Surface(surface);
Log.d("ANURAN","surfacetexture available streaming activity");
new PlayVideo().execute();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
Log.d("ANURAN","surfacetexturesize changed streaming activity");
try{
w=width;
h=height;
this.surface.release();
this.surface=null;
this.surface=new Surface(surface);
if(isComingFromFullScreen){
//surfaceView.getHolder().getSurface().release();
updateSurfaceView(width, height);
ffmpegPlayer.libSetup(width,height);
ffmpegPlayer.libSetSurface(this.surface);
}
}catch (Exception throwable){
Toast.makeText(getActivity(),"Something went wrong while live streaming.Try again",Toast.LENGTH_SHORT).show();
}
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
Log.d("ANURAN","surfacetexture destroyed streaming activity");
// surfaceView.getSurfaceTexture().release();
// if(this.surface !=null){
// this.surface.release();
// this.surface=null;
// }
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
Log.d("ANURAN","surfacetexture updated streaming activity");
if(this.surface !=null){
this.surface.release();
this.surface=null;
this.surface=new Surface(surface);
}else{
this.surface=new Surface(surface);
}
}
public class PlayVideo extends AsyncTask<Void,Void,Void>{
@Override
protected Void doInBackground(Void... voids) {
try{
isInitialized=ffmpegPlayer.libInit(url);
}catch (Exception e){
e.printStackTrace();
Snackbar.make(frameLayout,"Exception Occured",Snackbar.LENGTH_SHORT).show();
}
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
isFirstTime=false;
postInit();
this.cancel(true);
}
}
public void onEvent(FFMPEG ffmpeg){
url = ffmpeg.getUrl();
progressBar.setVisibility(View.VISIBLE);
//stopPlaying();
anotherVideo=true;
new PlayVideo().execute();
}
public void onEvent(Events.UpdateUrl newurl){
url=newurl.getUrl();
}
public void onEvent(Events.StopPlayback event){
stopPlaying();
}
public void onEvent(Events.NotifyPlayer notifyPlayer){
Snackbar.make(frameLayout,"Please stop any running video before playing another one",Toast.LENGTH_SHORT).show();
}
private void goFullScreen(){
Intent intent=new Intent(getContext(),FullScreenActivity.class);
Bundle bundle=new Bundle();
bundle.putString("url",url);
intent.putExtras(bundle);
getActivity().startActivity(intent);
}
}
FullScreenActivity.java
package com.wiznsystems.android.activities;
import android.annotation.SuppressLint;
import android.graphics.Matrix;
import android.graphics.Point;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.Toast;
import com.myeglu.android.R;
import com.myeglu.zoomview.AngleView;
import com.myeglu.zoomview.MatrixChangeListener;
import com.myeglu.zoomview.ZoomTextureView;
import com.wiznsystems.android.App;
import com.wiznsystems.android.data.objects.FFMPEG;
import com.wiznsystems.android.utils.FFMPEGPlayer;
import uk.copywitchshame.senab.photoview.gestures.PhotoViewAttacher;
import hugo.weaving.DebugLog;
/**
* Created by anuran on 9/3/18.
*/
@SuppressWarnings("JniMissingFunction")
public class FullScreenActivity extends AppCompatActivity implements TextureView.SurfaceTextureListener, PhotoViewAttacher.OnMatrixChangedListener {
private ZoomTextureView surfaceView;
public static AngleView angleView;
private ProgressBar progressBar;
private PhotoViewAttacher photoViewAttacher;
public static boolean isPlaying;
private boolean isInitialized;
private String url;
private FrameLayout frameLayout;
Surface surface;
private final String TAG=StreamingActivity.class.getSimpleName();
public int backCounter=0;
FFMPEGPlayer ffmpegPlayer;
boolean fromADorSL=false;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().requestFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.activity_fullscreen);
surfaceView = (ZoomTextureView) findViewById(R.id.textureView);
angleView=(AngleView)findViewById(R.id.render_angle_view);
surfaceView.setSurfaceTextureListener(this);
frameLayout=(FrameLayout)findViewById(R.id.streaming_framelayout);
progressBar = ((ProgressBar)findViewById(R.id.progressBar));
progressBar.setVisibility(View.VISIBLE);
url=getIntent().getExtras().getString("url");
//new PlayVideo().execute();
ffmpegPlayer= App.getFFMPEG();
Log.d("ANURAN","fullscreen onCreate");
}
// @DebugLog
// private void postInit() {
// if (isInitialized) {
// initPlay();
// progressBar.setVisibility(View.GONE);
// } else {
// finish();
// }
// }
// private void initPlay() {
//
// try{
// int[] res = FFMPEGPlayer.libGetVideoRes();
// Log.d("ANURAN", "res width " + res[0] + ": height " + res[1]);
// if (res[0] <= 0) {
// res[0] = 480;
// }
// if (res[1] <= 0) {
// res[1] = 320;
// }
// int[] screenRes = getScreenRes();
// int width, height;
// float widthScaledRatio = screenRes[0] * 1.0f / res[0];
// float heightScaledRatio = screenRes[1] * 1.0f / res[1];
// if (widthScaledRatio > heightScaledRatio) {
// //use heightScaledRatio
// width = (int) (res[0] * heightScaledRatio);
// height = screenRes[1];
// } else {
// //use widthScaledRatio
// width = screenRes[0];
// height = (int) (res[1] * widthScaledRatio);
// }
// Log.d(TAG, "width " + width + ",height:" + height);
// updateSurfaceView(width, height);
// FFMPEGPlayer.libSetup(width, height);
// playMedia();
//
// photoViewAttacher = new PhotoViewAttacher(surfaceView, width, height);
// photoViewAttacher.setScaleType(ImageView.ScaleType.CENTER_CROP);
// photoViewAttacher.setOnMatrixChangeListener(this);
// photoViewAttacher.update();
// }catch (Exception e){
//
// }
// }
// private void playMedia() {
//
// try{
// if(progressBar.getVisibility()==View.VISIBLE){
// progressBar.setVisibility(View.GONE);
// }
// FFMPEGPlayer.libPlay();
// isPlaying = true;
// CamerasActivity.isPlaying=true;
// }catch (Exception e){
//
// }
// }
// @DebugLog
// private void updateSurfaceView(int pWidth, int pHeight) {
// //update surfaceview dimension, this will cause the native window to change
// Log.d("ANURAN UPDATE SURFACE", "width " + pWidth + ",height:" + pHeight);
// FrameLayout.LayoutParams params = (FrameLayout.LayoutParams) surfaceView.getLayoutParams();
// params.width = pWidth;
// params.height = pHeight;
// surfaceView.setLayoutParams(params);
// }
@Override
public void onBackPressed() {
if(backCounter==1){
super.onBackPressed();
}else{
//surface.release();
StreamingActivity.isFullScreenDisplayed=0;
surfaceView.getSurfaceTexture().release();
//ffmpegPlayer.libSetSurface(null);
if(this.surface !=null){
this.surface.release();
this.surface=null;
}
backCounter++;
Toast.makeText(FullScreenActivity.this,"Press back again to quit full screen",Toast.LENGTH_SHORT).show();
}
}
@DebugLog
@SuppressLint("NewApi")
private int[] getScreenRes() {
int[] res = new int[2];
Display display = getWindowManager().getDefaultDisplay();
Point size = new Point();
display.getSize(size);
res[0] = size.x;
res[1] = size.y;
return res;
}
private void stopPlaying() {
isPlaying = false;
try{
ffmpegPlayer.libStop();
}catch (Exception e){
}
}
@Override
public void onStop() {
super.onStop();
fromADorSL=true;
}
@Override
public void onResume() {
super.onResume();
if(fromADorSL){
ffmpegPlayer.libSetSurface(null);
ffmpegPlayer.libSetSurface(this.surface);
}
}
@Override
public void onStart() {
super.onStart();
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
//Toast.makeText(FullScreenActivity.this,"SurfaceTexture available",Toast.LENGTH_SHORT).show();
//updateSurfaceView(width, height);
try{
this.surface=new Surface(surface);
ffmpegPlayer.libSetup(width, height);
ffmpegPlayer.libSetSurface(this.surface);
photoViewAttacher = new PhotoViewAttacher(surfaceView, width, height);
photoViewAttacher.setScaleType(ImageView.ScaleType.CENTER_CROP);
photoViewAttacher.setOnMatrixChangeListener(this);
photoViewAttacher.update();
progressBar.setVisibility(View.INVISIBLE);
angleView.setVisibility(View.VISIBLE);
}catch (Exception e){
}
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
try{
Toast.makeText(FullScreenActivity.this,"SurfaceTexture changed",Toast.LENGTH_SHORT).show();
if (photoViewAttacher != null ) {
photoViewAttacher.update ();
}
if(this.surface !=null){
this.surface.release();
}
this.surface=null;
this.surface=new Surface(surface);
ffmpegPlayer.libSetup(width,height);
ffmpegPlayer.libSetSurface(this.surface);
}catch (Exception e){
}
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
try{
// this.surface.release();
// this.surface=null;
surfaceView.getSurfaceTexture().release();
if(this.surface !=null){
this.surface.release();
this.surface=null;
}
//ffmpegPlayer.libSetSurface(null);
}catch (Exception e){
}
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
if(this.surface !=null){
this.surface.release();
this.surface=null;
this.surface=new Surface(surface);
}
}
@Override
public void onMatrixChanged(Matrix matrix, RectF rectF) {
float maxMovement = (rectF.width() - surfaceView.getWidth());
float middle = surfaceView.getWidth() * 0.5f + surfaceView.getLeft();
float currentMiddle = rectF.width() * 0.5f + rectF.left;
float angle=(-(int) ((currentMiddle - middle) * 100 / maxMovement));
Log.d("ANURAN",angle+"");
angleView.setCurrentProgress((int)angle);
}
// public class PlayVideo extends AsyncTask<Void,Void,Void> {
//
// @Override
// protected Void doInBackground(Void... voids) {
// try{
// isInitialized=(FFMPEGPlayer.libInit(url)==0);
// }catch (Exception e){
// e.printStackTrace();
// }
// return null;
// }
//
// @Override
// protected void onPostExecute(Void aVoid) {
// super.onPostExecute(aVoid);
// postInit();
// this.cancel(true);
// }
// }
}
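For reference, this is the kind of re-attach I have been experimenting with in onResume (just a sketch, not confirmed working; it assumes the native player is happy to receive a freshly created Surface through libSetSurface):
@Override
public void onResume() {
    super.onResume();
    if (isComingFromFullScreen || comingFromAppDrawer) {
        progressBar.setVisibility(View.INVISIBLE);
        SurfaceTexture texture = surfaceView.getSurfaceTexture();
        if (texture != null) {
            // Rebuild the Surface from the TextureView's current SurfaceTexture
            // instead of reusing the possibly stale reference.
            if (surface != null) {
                surface.release();
            }
            surface = new Surface(texture);
            ffmpegPlayer.libSetSurface(null);
            ffmpegPlayer.libSetSurface(surface);
        }
    }
}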
I'm having a problem with the getSystemService method: Android Studio keeps telling me it cannot resolve it. From what I've read, this may be because my class is not an Activity. I've read related topics about the getSystemService issue, but I couldn't find any matching solution.
I place my code below.
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.hardware.Camera;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.TextView;
public class MainActivity extends AppCompatActivity {
private Camera mCamera;
private CameraView mCameraView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
@Override
protected void onResume(){
super.onResume();
try{
mCamera = Camera.open(1);
mCameraView = new CameraView(this, mCamera);
setContentView(mCameraView);
} catch (Exception e){
finish();
}
}
@Override
protected void onPause(){
if(mCamera != null){
mCamera.release();
mCamera = null;
}
super.onPause();
}
}
The class below is the one I'm trying to figure out; the getSystemService call in surfaceChanged is the line that won't resolve:
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Surface;
import android.view.Display;
import android.view.WindowManager;
import java.io.IOException;
import android.app.Activity;
import java.util.List;
public class CameraView extends SurfaceView implements SurfaceHolder.Callback{
private Camera mCamera;
private View mView;
private WindowManager display;
public CameraView(Context context, Camera mCamera) {
super(context);
this.mCamera = mCamera;
mCamera.setDisplayOrientation(90);
SurfaceHolder holder = getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try{
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
} catch (IOException e) {
Log.e("cameraPreview", "The failure of the camera settings");
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
Camera.Size optionalSize = getOptimalPreviewSize(sizes, width, height);
params.setPreviewSize(optionalSize.width, optionalSize.height);
mCamera.setParameters(params);
boolean isPreviewRunning = true;
if (isPreviewRunning)
{
mCamera.stopPreview();
}
Parameters parameters = mCamera.getParameters();
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
if(display.getRotation() == Surface.ROTATION_0)
{
parameters.setPreviewSize(height, width);
mCamera.setDisplayOrientation(90);
}
if(display.getRotation() == Surface.ROTATION_90)
{
parameters.setPreviewSize(width, height);
}
if(display.getRotation() == Surface.ROTATION_180)
{
parameters.setPreviewSize(height, width);
}
if(display.getRotation() == Surface.ROTATION_270)
{
parameters.setPreviewSize(width, height);
mCamera.setDisplayOrientation(180);
}
mCamera.setParameters(parameters);
previewCamera();
}
public void previewCamera()
{
try
{
mCamera.setPreviewDisplay();
mCamera.startPreview();
boolean isPreviewRunning = true;
}
catch(Exception e)
{
Log.d(APP_CLASS, "Cannot start preview", e);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mCamera.release();
mCamera = null;
}
private Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.1;
double targetRatio=(double)h / w;
if (sizes == null) return null;
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
for (Camera.Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
}
Could you tell me how this should be done to avoid the problem?
Thanks!
You need to call getSystemService on a Context, which is why it doesn't work in your CameraView. You can store the context you already pass in as a field and call getSystemService on that. See your modified code below:
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Surface;
import android.view.Display;
import android.view.WindowManager;
import java.io.IOException;
import android.app.Activity;
import java.util.List;
public class CameraView extends SurfaceView implements SurfaceHolder.Callback{
private Camera mCamera;
private View mView;
private WindowManager display;
private Context mContext;
public CameraView(Context context, Camera mCamera) {
super(context);
mContext = context;
this.mCamera = mCamera;
mCamera.setDisplayOrientation(90);
SurfaceHolder holder = getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try{
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
} catch (IOException e) {
Log.e("cameraPreview", "The failure of the camera settings");
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
Camera.Size optionalSize = getOptimalPreviewSize(sizes, width, height);
params.setPreviewSize(optionalSize.width, optionalSize.height);
mCamera.setParameters(params);
boolean isPreviewRunning = true;
if (isPreviewRunning)
{
mCamera.stopPreview();
}
Parameters parameters = mCamera.getParameters();
Display display = ((WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
if(display.getRotation() == Surface.ROTATION_0)
{
parameters.setPreviewSize(height, width);
mCamera.setDisplayOrientation(90);
}
if(display.getRotation() == Surface.ROTATION_90)
{
parameters.setPreviewSize(width, height);
}
if(display.getRotation() == Surface.ROTATION_180)
{
parameters.setPreviewSize(height, width);
}
if(display.getRotation() == Surface.ROTATION_270)
{
parameters.setPreviewSize(width, height);
mCamera.setDisplayOrientation(180);
}
mCamera.setParameters(parameters);
previewCamera();
}
public void previewCamera()
{
try
{
mCamera.setPreviewDisplay();
mCamera.startPreview();
boolean isPreviewRunning = true;
}
catch(Exception e)
{
Log.d(APP_CLASS, "Cannot start preview", e);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mCamera.release();
mCamera = null;
}
private Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.1;
double targetRatio=(double)h / w;
if (sizes == null) return null;
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
for (Camera.Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
}
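Equivalently, since CameraView already extends View, you could skip the extra field and use the context the view already holds; a minimal sketch of the same lookup:
Display display = ((WindowManager) getContext()
        .getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();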
You should use getActivity() to solve this:
getActivity().getSystemService(Context.AUDIO_SERVICE);
This works for me:
context!!.getSystemService(Service.WINDOW_SERVICE) as WindowManager;
Sometimes this is caused when Android Studio is not able to resolve a specific file, and it can be fixed by cleaning and rebuilding the project.
If cleaning and rebuilding did not work, use getSystemService like this. This worked for me:
TelephonyManager telephonyManager = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);
As the title says, this is my problem: my camera successfully switches to the front camera, but on the second button click it does not switch back to the rear camera and stays on the front view.
CameraActivity.java:
package mano.whatever;
import android.app.Activity;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Picture;
import android.graphics.RectF;
import android.hardware.Camera;
import android.media.Image;
import android.net.Uri;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.annotation.Nullable;
import android.util.Base64;
import android.util.Log;
import android.view.View;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.Toast;
import android.os.AsyncTask;
import com.google.android.gms.identity.intents.AddressConstants;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
public class CameraActivity extends Activity {
private Camera.Size mSize = null;
private Camera mCamera = null;
private CameraView mCameraView=null;
private SurfaceView mPreview;
private SurfaceHolder mSurfaceHolder;
Camera.PictureCallback jpegCallback;
final int CAMERA_CAPTURE = 1;
private Camera.PictureCallback mPictureCallback;
boolean isFront;
int camBackId = Camera.CameraInfo.CAMERA_FACING_BACK;
int camFrontId = Camera.CameraInfo.CAMERA_FACING_FRONT;
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
if (mCamera == null){
initCamera(0);
}
//btn to close the application
ImageView imgClose = (ImageView) findViewById(R.id.left_button);
imgClose.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
toThanksActivity();
}
});
ImageView photo = (ImageView) findViewById(R.id.photo_button);
photo.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mCamera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean b, Camera camera) {
mCamera.takePicture(shutterCall, PictureCallback, mPicture);
}
});
}
});
ImageView rotate = (ImageView) findViewById(R.id.rotate_camera);
rotate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
rotateCamera();
}
});
}
public void rotateCamera() {
boolean isFront = false;
int CamId = 0;
CamId = Camera.getNumberOfCameras();
int cameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
Camera.CameraInfo currentCamInfo = new Camera.CameraInfo();
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
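// Note: currentCamInfo is never filled in with Camera.getCameraInfo(), so its facing
// field keeps the default value (CAMERA_FACING_BACK) and the branch below always opens the front camera.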
if (currentCamInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK){
initCamera(1);
}
else{
initCamera(0);
}
}
public void initCamera(int potato){
mCamera = Camera.open(potato);//you can use open(int) to use different cameras
FrameLayout camera_view = (FrameLayout) findViewById(R.id.camera_view);
camera_view.removeView(mCameraView);
if (mCamera != null) {
mCameraView = new CameraView(this, mCamera);//create a SurfaceView to show camera data
camera_view.addView(mCameraView);//add the SurfaceView to the layout
}
}
public void toThanksActivity() {
mCamera.stopPreview();
mCamera.release();
finish();
}
public void toCaptureImage() {
mCamera.takePicture(shutterCall, PictureCallback, mPicture);
}
Camera.ShutterCallback shutterCall = new Camera.ShutterCallback() {
@Override
public void onShutter() {
}
};
Camera.PictureCallback PictureCallback = new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] bytes, Camera camera) {
}
};
Camera.PictureCallback mPicture = new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyymmddhhmmss");
String date = dateFormat.format(new Date());
String photoFile = "Picture_" + date + ".jpg";
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "Camera");
String filename = mediaStorageDir.getPath() + File.separator + photoFile;
File pictureFile = new File(filename);
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.flush();
fos.close();
ByteArrayOutputStream stream = new ByteArrayOutputStream();
Bitmap bm = BitmapFactory.decodeFile(filename);
Matrix matrix = new Matrix();
matrix.postRotate(90);
Bitmap rotatedBitmap = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), matrix, true);
FileOutputStream fos2 = new FileOutputStream(pictureFile);
rotatedBitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream);
byte[] byteArray = stream.toByteArray();
mCamera.stopPreview();
mCamera.release();
Intent intent = new Intent();
intent.putExtra("image", byteArray);
setResult(250, intent);
finish();
} catch (IOException e) {
}
}
};
Camera.AutoFocusCallback myAutoFocusCallback = new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean b, Camera camera) {
}
};
}
CameraView.java:
package mano.whatever;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.io.IOException;
import java.util.List;
public class CameraView extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera;
private Camera.Size mSize = null;
public CameraView(Context context, Camera camera) {
super(context);
mCamera = camera;
mCamera.setDisplayOrientation(90);
//get the holder and set this class as the callback, so we can get camera data here
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
//when the surface is created, we can set the camera to draw images in this surfaceholder
Camera.Parameters parameters = mCamera.getParameters();
List<Camera.Size> previewSizes = parameters.getSupportedPreviewSizes();
// You need to choose the most appropriate previewSize for your app
Size optimalSize = getOptimalPreviewSize(previewSizes, getResources().getDisplayMetrics().widthPixels, getResources().getDisplayMetrics().heightPixels);
parameters.setPreviewSize(optimalSize.width, optimalSize.height);
mCamera.setParameters(parameters);
mCamera.startPreview();
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i2, int i3) {
//before changing the application orientation, you need to stop the preview, rotate and then start it again
if (mHolder.getSurface() == null)//check if the surface is ready to receive camera data
return;
try {
mCamera.stopPreview();
} catch (Exception e) {
//this will happen when you try to stop a preview that isn't running
}
//now, recreate the camera preview
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (IOException e) {
Log.d("ERROR", "Camera error on surfaceChanged " + e.getMessage());
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
//our app has only one screen, so we'll destroy the camera in the surface
//if you are using more screens, please move this code to your activity
}
private Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.1;
double targetRatio=(double)h / w;
if (sizes == null) return null;
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
for (Camera.Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
}
I know this is sketchy code, but I'd love to hear some suggestions.
Thank you in advance. :)
EDIT
I completely reworked my code and got rid of the CameraView.java class, and it now works fluently thanks to "Android camera preview freezes when switching cameras?".
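For anyone curious, a minimal sketch of what that switch logic boils down to (an illustration, not my exact final code; it also assumes camera ids 0 and 1 map to the back and front cameras, which holds on most devices):
private int currentCameraId = Camera.CameraInfo.CAMERA_FACING_BACK;

public void switchCamera() {
    // Release the current camera before opening the other one.
    if (mCamera != null) {
        mCamera.stopPreview();
        mCamera.release();
        mCamera = null;
    }
    // Toggle between the back and front camera ids.
    currentCameraId = (currentCameraId == Camera.CameraInfo.CAMERA_FACING_BACK)
            ? Camera.CameraInfo.CAMERA_FACING_FRONT
            : Camera.CameraInfo.CAMERA_FACING_BACK;
    initCamera(currentCameraId);
}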
I haven't tried it, but try something like this; hope it works:
public void rotateCamera() {
boolean isFront = false;
int CamId = 0;
CamId = Camera.getNumberOfCameras();
int cameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
Camera.CameraInfo currentCamInfo = new Camera.CameraInfo();
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
if (currentCamInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK){
camera = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
}
else{
camera = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
}
if (mCamera != null) {
try {
mCamera.setPreviewDisplay(surfaceView.getHolder());
mCamera.startPreview();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
I am currently designing an app that opens the camera with a simple button click. I now want to change the shape that appears in the center of the camera (currently a circle or rectangle, depending on the device) to a custom shape. I am building a custom camera using a main activity class and a camera preview class. This is my code so far:
Layout File
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical" android:layout_width="fill_parent"
android:layout_height="fill_parent" android:id="@+id/layout">
<TextView android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:contentDescription="@string/camstring"
android:textSize="24sp" />
<FrameLayout android:id="@+id/preview"
android:layout_weight="1"
android:layout_width="fill_parent"
android:layout_height="0dp">
</FrameLayout>
<Button android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/buttonClick"
android:contentDescription="@string/action"
android:layout_gravity="center"></Button>
</LinearLayout>
MainActivity
package com.test.Nurse_Cam;
import android.os.Bundle;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.app.Activity;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.ShutterCallback;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.FrameLayout;
public class MainActivity extends Activity {
private static final String TAG = "CameraDemo";
Camera camera;
Preview preview;
Button buttonClick;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
preview = new Preview(this);
((FrameLayout) findViewById(R.id.preview)).addView(preview);
buttonClick = (Button) findViewById(R.id.buttonClick);
buttonClick.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
preview.camera.takePicture(shutterCallback, rawCallback,
jpegCallback);
}
});
Log.d(TAG, "onCreate'd");
}
ShutterCallback shutterCallback = new ShutterCallback() {
public void onShutter() {
Log.d(TAG, "onShutter'd");
}
};
/** Handles data for raw picture */
PictureCallback rawCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Log.d(TAG, "onPictureTaken - raw");
}
};
/** Handles data for jpeg picture */
PictureCallback jpegCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
FileOutputStream outStream = null;
try {
// write to local sandbox file system
// outStream =
// CameraDemo.this.openFileOutput(String.format("%d.jpg",
// System.currentTimeMillis()), 0);
// Or write to sdcard
outStream = new FileOutputStream(String.format(
getString(R.string._sdcard_d_jpg), System.currentTimeMillis()));
outStream.write(data);
outStream.close();
Log.d(TAG, "onPictureTaken - wrote bytes: " + data.length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
Log.d(TAG, "onPictureTaken - jpeg");
}
};
}
Preview Class
package com.test.Nurse_Cam;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
//import java.util.Iterator;
import java.util.List;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
@SuppressLint("SdCardPath")
class Preview extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "Preview";
//private final static double epsilon = 0.17;
private Size mPreviewSize;
SurfaceHolder mHolder;
public Camera camera;
// Filled in surfaceCreated(); calling camera.getParameters() here would throw a
// NullPointerException because the camera has not been opened yet.
public List<Size> mSupportedPreviewSizes;
@SuppressWarnings("deprecation")
Preview(Context context) {
super(context);
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
camera = Camera.open();
mSupportedPreviewSizes = camera.getParameters().getSupportedPreviewSizes();
try {
camera.setPreviewDisplay(holder);
camera.setPreviewCallback(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera arg1) {
FileOutputStream outStream = null;
try {
// Note: preview frames arrive as raw NV21 data, not JPEG, so the files written
// here will not be viewable images despite the .jpg extension.
outStream = new FileOutputStream(String.format(
"/sdcard/%d.jpg", System.currentTimeMillis()));
outStream.write(data);
outStream.close();
Log.d(TAG, "onPreviewFrame - wrote bytes: "
+ data.length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
Preview.this.invalidate();
}
});
} catch (IOException e) {
e.printStackTrace();
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Stop the preview because the surface will be destroyed upon return,
// then clear the callback and release the camera so other apps can use it.
camera.stopPreview();
camera.setPreviewCallback(null);
camera.release();
camera = null;
}
private Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.1;
double targetRatio=(double)h / w;
if (sizes == null) return null;
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
for (Camera.Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if (mSupportedPreviewSizes != null) {
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// Now that the size is known, set up the camera parameters and begin
// the preview.
if (mPreviewSize == null) {
// onMeasure() may have run before the camera was opened, so pick a size here as a fallback.
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, w, h);
}
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
camera.setParameters(parameters);
camera.startPreview();
}
// This adds a canvas to the app
@Override
public void draw(Canvas canvas) {
super.draw(canvas);
Paint p = new Paint();
p.setColor(Color.RED); // the Paint(int) constructor takes flags, not a color, so set the color explicitly
Log.d(TAG, "draw");
canvas.drawText("PREVIEW", canvas.getWidth() / 2,
canvas.getHeight() / 2, p);
}
}
The camera preview appears distorted when I use the app on my Galaxy S5 and I am unable to see the shape that usually appears in the center. Any input on the best way to place a custom shape in the center, replacing the usual one, would be appreciated. Thanks!
Use a RelativeLayout as the parent layout in your XML and keep the button centered with android:layout_centerInParent="true"; that way it should not appear distorted on any device.
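As a rough sketch of that idea (the ids here are placeholders, not names taken from the project above), the overlay inside the preview container could look like this:
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<SurfaceView
android:id="@+id/surfaceView"
android:layout_width="fill_parent"
android:layout_height="fill_parent" />
<!-- the same centerInParent trick works for a custom shape, e.g. an ImageView with your drawable -->
<Button
android:id="@+id/buttonClick"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerInParent="true" />
</RelativeLayout>
Because the RelativeLayout fills its parent, the centered view stays in the middle of the screen no matter how the preview itself is letter-boxed.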
I took help from the code at https://github.com/josnidhin/Android-Camera-Example
But I am facing some problems:
How to add auto-focus and flash functionality - solved in the last edited code below.
When the screen orientation is 270 degrees, the live view is shown rotated by 180 degrees (see the orientation sketch after this list).
The captured view appears stretched.
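For the rotation problem, one option (a sketch adapted from the android.hardware.Camera.setDisplayOrientation documentation, not part of the project code below; it also needs android.app.Activity and android.view.Surface imports) is to compute the display orientation from the current device rotation instead of hard-coding 90 degrees, and to call it from surfaceCreated()/surfaceChanged():
// Derive the preview rotation from the device rotation and the camera sensor orientation.
public static void setCameraDisplayOrientation(Activity activity, int cameraId, Camera camera) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0: degrees = 0; break;
case Surface.ROTATION_90: degrees = 90; break;
case Surface.ROTATION_180: degrees = 180; break;
case Surface.ROTATION_270: degrees = 270; break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate for the front camera mirror
} else { // back-facing camera
result = (info.orientation - degrees + 360) % 360;
}
camera.setDisplayOrientation(result);
}
The stretched capture view usually means the chosen preview size does not match the aspect ratio of the SurfaceView, so the width and height passed to getOptimalPreviewSize() should be the same ones used to lay out the child view.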
The code from which I call the custom camera class:
Camera cam = Camera.open();
if (null == cam)
{
// no camera exists
Intent i = new Intent(Intent.ACTION_PICK, android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
startActivityForResult(i, RESULT_LOAD_IMAGE);
}
else
{
cam.release();
Intent intent = new Intent(CreateList.this, CamTestActivity.class);
startActivityForResult(intent, TAKE_PICTURE);
}
Custom camera class CamTestActivity.class
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.ShutterCallback;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnLongClickListener;
import android.view.ViewGroup.LayoutParams;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.FrameLayout;
import com.example.R;
public class CamTestActivity extends Activity {
private static final String TAG = "CamTestActivity";
Preview preview;
Button buttonClick, buttonCancel;
Camera camera;
String fileName;
Activity act;
Context ctx;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ctx = this;
act = this;
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.take_picture);
preview = new Preview(this, (SurfaceView) findViewById(R.id.surfaceView));
preview.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
((FrameLayout) findViewById(R.id.preview)).addView(preview);
preview.setKeepScreenOn(true);
buttonClick = (Button) findViewById(R.id.buttonClick);
buttonCancel = (Button) findViewById(R.id.buttonCancel);
buttonClick.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
camera.takePicture(shutterCallback, rawCallback, jpegCallback);
}
});
buttonClick.setOnLongClickListener(new OnLongClickListener() {
@Override
public boolean onLongClick(View arg0) {
camera.autoFocus(new AutoFocusCallback() {
@Override
public void onAutoFocus(boolean focus_flag, Camera cam) {
if (focus_flag)
cam.takePicture(shutterCallback, rawCallback, jpegCallback);
}
});
return true;
}
});
buttonCancel.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
finish();
}
});
}
@Override
protected void onResume() {
super.onResume();
// preview.camera = Camera.open();
camera = Camera.open(0);
camera.startPreview();
preview.setCamera(camera);
}
@Override
protected void onPause() {
if (camera != null) {
camera.stopPreview();
preview.setCamera(null);
camera.release();
camera = null;
}
super.onPause();
}
private void resetCam() {
camera.startPreview();
preview.setCamera(camera);
Intent i = new Intent();
i.putExtra("captured_img", fileName);
setResult(RESULT_OK, i);
finish();
}
ShutterCallback shutterCallback = new ShutterCallback() {
public void onShutter() {
// Log.d(TAG, "onShutter'd");
}
};
PictureCallback rawCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
// Log.d(TAG, "onPictureTaken - raw");
}
};
PictureCallback jpegCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
FileOutputStream outStream = null;
try {
// Write to SD Card
fileName = String.format(Environment.getExternalStorageDirectory() + "/FansRave_UploadDeal_Pic.jpg");
outStream = new FileOutputStream(fileName);
outStream.write(data);
outStream.close();
Log.d(TAG, "onPictureTaken - wrote bytes: " + data.length);
resetCam();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
Log.d(TAG, "onPictureTaken - jpeg");
}
};
}
Preview.class
import java.io.IOException;
import java.util.List;
import android.content.Context;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
class Preview extends ViewGroup implements SurfaceHolder.Callback {
private final String TAG = "Preview";
SurfaceView mSurfaceView;
SurfaceHolder mHolder;
Size mPreviewSize;
List<Size> mSupportedPreviewSizes;
Camera mCamera;
Context mContext;
Preview(Context context, SurfaceView sv) {
super(context);
mContext = context;
mSurfaceView = sv;
mHolder = mSurfaceView.getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setCamera(Camera camera) {
mCamera = camera;
if (mCamera != null) {
mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();
requestLayout();
// get Camera parameters
Camera.Parameters params = mCamera.getParameters();
List<String> focusModes = params.getSupportedFocusModes();
if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
// set the focus mode
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
// set Camera parameters
mCamera.setParameters(params);
}
}
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
// We purposely disregard child measurements because we act as a
// wrapper to a SurfaceView that centers the camera preview instead
// of stretching it.
final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if (mSupportedPreviewSizes != null) {
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
}
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
if (changed && getChildCount() > 0) {
final View child = getChildAt(0);
final int width = r - l;
final int height = b - t;
int previewWidth = width;
int previewHeight = height;
if (mPreviewSize != null) {
previewWidth = mPreviewSize.width;
previewHeight = mPreviewSize.height;
}
// Center the child SurfaceView within the parent.
if (width * previewHeight > height * previewWidth) {
final int scaledChildWidth = previewWidth * height / previewHeight;
child.layout((width - scaledChildWidth) / 2, 0, (width + scaledChildWidth) / 2, height);
} else {
final int scaledChildHeight = previewHeight * width / previewWidth;
child.layout(0, (height - scaledChildHeight) / 2, width, (height + scaledChildHeight) / 2);
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where to draw.
try {
if (mCamera != null) {
if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) {
Global.orientation = "LANDSCAPE";
Log.e("Orientation mode", "" + Global.orientation);
} else if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
mCamera.stopPreview();
mCamera.setDisplayOrientation(90);
mCamera.startPreview();
Global.orientation = "PORTRAIT";
Log.e("Orientation mode", "" + Global.orientation);
}
mCamera.setPreviewDisplay(holder);
}
} catch (IOException exception) {
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
mCamera.release();
mCamera = null;
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
}
}
private Size getOptimalPreviewSize(List<Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.2;
double targetRatio = (double) w / h;
if (sizes == null) return null;
Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
// Try to find a preview size that matches both the aspect ratio and the target height
for (Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
// If no size matches the aspect ratio, ignore that requirement
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if (mCamera != null) {
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
// for flash
if (mContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH))
parameters.setFlashMode(Parameters.FLASH_MODE_AUTO);
requestLayout();
mCamera.setParameters(parameters);
mCamera.startPreview();
mCamera.autoFocus(null);
}
}
}
XML file take_picture.xml
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/layout"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical" >
<FrameLayout
android:id="#+id/preview"
android:layout_width="fill_parent"
android:layout_height="0dp"
android:layout_weight="1" >
<SurfaceView
android:id="#+id/surfaceView"
android:layout_width="wrap_content"
android:layout_height="wrap_content" />
<RelativeLayout
android:layout_width="wrap_content"
android:layout_height="fill_parent"
android:orientation="vertical" >
<Button
android:id="#+id/buttonClick"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerHorizontal="true"
android:layout_centerVertical="true"
android:background="#drawable/button_gradient" />
<Button
android:id="#+id/buttonCancel"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_alignParentLeft="true"
android:text="Cancel" />
</RelativeLayout>
</FrameLayout>
</LinearLayout>
If you are having problems with the custom camera, you can fall back to the built-in camera intent; the stored image's rotation can then be corrected with the code below.
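A minimal sketch of launching the built-in camera first (REQUEST_TAKE_PHOTO and mFile are placeholder names, not taken from the code in this answer):
// Ask the stock camera app to capture an image and write it to a known file.
File mFile = new File(Environment.getExternalStorageDirectory(), "capture.jpg");
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
intent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(mFile));
startActivityForResult(intent, REQUEST_TAKE_PHOTO);
In onActivityResult() the stored JPEG can then be decoded and rotated according to its EXIF orientation: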
Bitmap bm = BitmapFactory.decodeStream(new FileInputStream(mFile), null, mBitmapFactoryOptions);
Bitmap bitmap = bm;
ExifInterface exif = new ExifInterface(ImagePath);
int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, 1);
Log.e("ExifInteface .........", "rotation =" + orientation);
// exif.setAttribute(ExifInterface.ORIENTATION_ROTATE_90, 90);
Log.e("orientation", "" + orientation);
Matrix m = new Matrix();
if ((orientation == ExifInterface.ORIENTATION_ROTATE_180)) {
m.postRotate(180);
// m.postScale((float) bm.getWidth(), (float)
// bm.getHeight());
// if(m.preRotate(90)){
Log.e("in orientation", "" + orientation);
bitmap = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), m, true);
return bitmap;
} else if (orientation == ExifInterface.ORIENTATION_ROTATE_90) {
m.postRotate(90);
Log.e("in orientation", "" + orientation);
bitmap = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), m, true);
return bitmap;
} else if (orientation == ExifInterface.ORIENTATION_ROTATE_270) {
m.postRotate(270);
Log.e("in orientation", "" + orientation);
bitmap = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), m, true);
return bitmap;
}