How to display a watermark on the camera preview - java

I'm building a motion detector app on top of this project, and I would like to display a watermark image on the camera preview.
I tried this method, but it didn't work for me. Could you show, in code, how to display the watermark without interfering with the motion detection part?
MotionDetection.java
public class MotionDetectionActivity extends SensorsActivity {
private static final String TAG = "MotionDetectionActivity";
private static SurfaceView preview = null;
private static SurfaceHolder previewHolder = null;
private static Camera camera = null;
private static boolean inPreview = false;
private static long mReferenceTime = 0;
private static IMotionDetection detector = null;
private static volatile AtomicBoolean processing = new AtomicBoolean(false);
/**
* {@inheritDoc}
*/
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
preview = (SurfaceView) findViewById(R.id.preview);
previewHolder = preview.getHolder();
previewHolder.addCallback(surfaceCallback);
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
if (Preferences.USE_RGB) {
detector = new RgbMotionDetection();
} else if (Preferences.USE_LUMA) {
detector = new LumaMotionDetection();
} else {
// Using State based (aggregate map)
detector = new AggregateLumaMotionDetection();
}
}
/**
* {@inheritDoc}
*/
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
}
/**
* {@inheritDoc}
*/
@Override
public void onPause() {
super.onPause();
camera.setPreviewCallback(null);
if (inPreview) camera.stopPreview();
inPreview = false;
camera.release();
camera = null;
}
/**
* {@inheritDoc}
*/
@Override
public void onResume() {
super.onResume();
camera = Camera.open();
}
private PreviewCallback previewCallback = new PreviewCallback() {
/**
* {@inheritDoc}
*/
@Override
public void onPreviewFrame(byte[] data, Camera cam) {
if (data == null) return;
Camera.Size size = cam.getParameters().getPreviewSize();
if (size == null) return;
if (!GlobalData.isPhoneInMotion()) {
DetectionThread thread = new DetectionThread(data, size.width, size.height);
thread.start();
}
}
};
private SurfaceHolder.Callback surfaceCallback = new SurfaceHolder.Callback() {
/**
* {@inheritDoc}
*/
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
camera.setPreviewDisplay(previewHolder);
camera.setPreviewCallback(previewCallback);
} catch (Throwable t) {
Log.e("PreviewDemo-surfaceCallback", "Exception in setPreviewDisplay()", t);
}
}
/**
* {@inheritDoc}
*/
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Camera.Parameters parameters = camera.getParameters();
Camera.Size size = getBestPreviewSize(width, height, parameters);
if (size != null) {
parameters.setPreviewSize(size.width, size.height);
Log.d(TAG, "Using width=" + size.width + " height=" + size.height);
}
camera.setParameters(parameters);
camera.startPreview();
inPreview = true;
}
/**
* {@inheritDoc}
*/
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// Ignore
}
};
private static Camera.Size getBestPreviewSize(int width, int height, Camera.Parameters parameters) {
Camera.Size result = null;
for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
if (size.width <= width && size.height <= height) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea > resultArea) result = size;
}
}
}
return result;
}
private static final class DetectionThread extends Thread {
private byte[] data;
private int width;
private int height;
public DetectionThread(byte[] data, int width, int height) {
this.data = data;
this.width = width;
this.height = height;
}
/**
* {@inheritDoc}
*/
@Override
public void run() {
if (!processing.compareAndSet(false, true)) return;
// Log.d(TAG, "BEGIN PROCESSING...");
try {
// Previous frame
int[] pre = null;
if (Preferences.SAVE_PREVIOUS) pre = detector.getPrevious();
// Current frame (with changes)
// long bConversion = System.currentTimeMillis();
int[] img = null;
if (Preferences.USE_RGB) {
img = ImageProcessing.decodeYUV420SPtoRGB(data, width, height);
} else {
img = ImageProcessing.decodeYUV420SPtoLuma(data, width, height);
}
// long aConversion = System.currentTimeMillis();
// Log.d(TAG, "Converstion="+(aConversion-bConversion));
// Current frame (without changes)
int[] org = null;
if (Preferences.SAVE_ORIGINAL && img != null) org = img.clone();
if (img != null && detector.detect(img, width, height)) {
// The delay is necessary to avoid taking a picture while in
// the
// middle of taking another. This problem can causes some
// phones
// to reboot.
long now = System.currentTimeMillis();
if (now > (mReferenceTime + Preferences.PICTURE_DELAY)) {
mReferenceTime = now;
Bitmap previous = null;
if (Preferences.SAVE_PREVIOUS && pre != null) {
if (Preferences.USE_RGB) previous = ImageProcessing.rgbToBitmap(pre, width, height);
else previous = ImageProcessing.lumaToGreyscale(pre, width, height);
}
Bitmap original = null;
if (Preferences.SAVE_ORIGINAL && org != null) {
if (Preferences.USE_RGB) original = ImageProcessing.rgbToBitmap(org, width, height);
else original = ImageProcessing.lumaToGreyscale(org, width, height);
}
Bitmap bitmap = null;
if (Preferences.SAVE_CHANGES) {
if (Preferences.USE_RGB) bitmap = ImageProcessing.rgbToBitmap(img, width, height);
else bitmap = ImageProcessing.lumaToGreyscale(img, width, height);
}
Log.i(TAG, "Saving.. previous=" + previous + " original=" + original + " bitmap=" + bitmap);
Looper.prepare();
new SavePhotoTask().execute(previous, original, bitmap);
} else {
Log.i(TAG, "Not taking picture because not enough time has passed since the creation of the Surface");
}
}
} catch (Exception e) {
e.printStackTrace();
} finally {
processing.set(false);
}
// Log.d(TAG, "END PROCESSING...");
processing.set(false);
}
};
private static final class SavePhotoTask extends AsyncTask<Bitmap, Integer, Integer> {
/**
* {@inheritDoc}
*/
@Override
protected Integer doInBackground(Bitmap... data) {
for (int i = 0; i < data.length; i++) {
Bitmap bitmap = data[i];
String name = String.valueOf(System.currentTimeMillis());
if (bitmap != null) save(name, bitmap);
}
return 1;
}
private void save(String name, Bitmap bitmap) {
File photo = new File(Environment.getExternalStorageDirectory(), name + ".jpg");
if (photo.exists()) photo.delete();
try {
FileOutputStream fos = new FileOutputStream(photo.getPath());
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
fos.close();
} catch (java.io.IOException e) {
Log.e("PictureDemo", "Exception in photoCallback", e);
}
}
}
}
main.xml
<?xml version="1.0" encoding="utf-8"?>
<SurfaceView xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/preview"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
</SurfaceView>

A workaround is to overlay the activity's layout with a second layout file that contains the watermark image. To do so:
1) Create a new layout file inside the layout folder, for example:
overlay.xml
2) Insert an ImageView inside it, something like:
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
xmlns:app="http://schemas.android.com/apk/res-auto">
<ImageView
android:id="#+id/imageView1"
android:layout_centerInParent="true"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:src="#drawable/android" />
</RelativeLayout>
3) Then, inside the activity's Java file (i.e. MotionDetectionActivity.java),
create a new method addView():
private void addView()
{
LayoutInflater controlInflater = LayoutInflater.from(getBaseContext());
View viewControl = controlInflater.inflate(R.layout.overlay, null);
ViewGroup.LayoutParams layoutParamsControl = new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
this.addContentView(viewControl, layoutParamsControl);
}
4) Finally invoke the addView() method from onCreate() to add the image:
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
preview = (SurfaceView) findViewById(R.id.preview);
previewHolder = preview.getHolder();
previewHolder.addCallback(surfaceCallback);
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
addView();
The end result is an image overlaid on top of the SurfaceView. Given a good-quality watermark image, the rendered watermark will look like part of the preview itself. Hope it helps.
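Note that this overlay only affects what is drawn on screen; the preview frames delivered to onPreviewFrame() are untouched, so the motion detection keeps working on unmodified data. If the watermark should also appear in the photos that SavePhotoTask writes out, one option is to composite it onto the bitmap just before saving. A minimal sketch, assuming the same R.drawable.android resource as in the overlay above; a Context would have to be handed to DetectionThread, since the class as posted has none:

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;

public final class Watermark {

    private Watermark() {}

    /** Returns a mutable copy of the frame with the watermark drawn in the bottom-right corner. */
    public static Bitmap apply(Context context, Bitmap frame, int marginPx) {
        Bitmap result = frame.copy(Bitmap.Config.ARGB_8888, true); // mutable copy, original untouched
        Bitmap mark = BitmapFactory.decodeResource(context.getResources(), R.drawable.android);
        Paint paint = new Paint(Paint.FILTER_BITMAP_FLAG);
        paint.setAlpha(160); // semi-transparent so the scene stays visible behind the mark
        new Canvas(result).drawBitmap(mark,
                result.getWidth() - mark.getWidth() - marginPx,
                result.getHeight() - mark.getHeight() - marginPx,
                paint);
        return result;
    }
}

It could then be called as, say, bitmap = Watermark.apply(context, bitmap, 16); right before new SavePhotoTask().execute(previous, original, bitmap);.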

Related

App crashes on pressing the back button from a Camera Activity

I'm trying to make a motion detector app that captures images when motion is detected. It works fine and saves the images. The problem is that the app crashes when I press the back button in the camera activity to return to the home activity. How do I fix it?
Here is my code:
public class MotionDetectionActivity extends SensorsActivity {
private static final String TAG = "MotionDetectionActivity";
private static final String ENABLE_MOTION_DETECTION="switch_md";
private static SurfaceView preview = null;
private static SurfaceHolder previewHolder = null;
private static Camera camera = null;
private static boolean inPreview = false;
private static long mReferenceTime = 0;
private static IMotionDetection detector = null;
public static MediaPlayer song;
public static Vibrator mVibrator;
private static volatile AtomicBoolean processing = new AtomicBoolean(false);
public int MY_PERMISSIONS_REQUEST_CAMERA;
/**
* {@inheritDoc}
*/
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.main);
SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
boolean enablemotionpref = sharedPref.getBoolean(ENABLE_MOTION_DETECTION, true);
song = MediaPlayer.create(this, R.raw.sound);
mVibrator = (Vibrator)this.getSystemService(VIBRATOR_SERVICE);
preview = (SurfaceView) findViewById(R.id.preview);
previewHolder = preview.getHolder();
previewHolder.addCallback(surfaceCallback);
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
if (enablemotionpref) {
if (Preferences.USE_RGB) {
detector = new RgbMotionDetection();
} else if (Preferences.USE_LUMA) {
detector = new LumaMotionDetection();
} else {
// Using State based (aggregate map)
detector = new AggregateLumaMotionDetection();
}
}
}
/**
* {@inheritDoc}
*/
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
}
/**
* {@inheritDoc}
*/
@Override
public void onPause() {
super.onPause();
if(song!=null && song.isPlaying())
{
song.stop();}
camera.setPreviewCallback(null);
if (inPreview) camera.stopPreview();
inPreview = false;
camera.release();
camera = null;
}
/**
* {@inheritDoc}
*/
@Override
public void onResume() {
super.onResume();
camera = Camera.open();
}
private PreviewCallback previewCallback = new PreviewCallback() {
/**
* {@inheritDoc}
*/
@Override
public void onPreviewFrame(byte[] data, Camera cam) {
if (data == null) return;
Camera.Size size = cam.getParameters().getPreviewSize();
if (size == null) return;
if (!GlobalData.isPhoneInMotion()) {
DetectionThread thread = new DetectionThread(data, size.width, size.height);
thread.start();
}
}
};
private SurfaceHolder.Callback surfaceCallback = new SurfaceHolder.Callback() {
/**
* {@inheritDoc}
*/
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
camera.setPreviewDisplay(previewHolder);
camera.setPreviewCallback(previewCallback);
} catch (Throwable t) {
Log.e("Callback", "Exception in setPreviewDisplay()", t);
}
}
/**
* {@inheritDoc}
*/
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Camera.Parameters parameters = camera.getParameters();
Camera.Size size = getBestPreviewSize(width, height, parameters);
if (size != null) {
parameters.setPreviewSize(size.width, size.height);
Log.d(TAG, "Using width=" + size.width + " height=" + size.height);
}
camera.setParameters(parameters);
camera.startPreview();
inPreview = true;
}
/**
* {@inheritDoc}
*/
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// Ignore
}
};
private static Camera.Size getBestPreviewSize(int width, int height, Camera.Parameters parameters) {
Camera.Size result = null;
for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
if (size.width <= width && size.height <= height) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea > resultArea) result = size;
}
}
}
return result;
}
private static final class DetectionThread extends Thread {
private byte[] data;
private int width;
private int height;
public DetectionThread(byte[] data, int width, int height) {
this.data = data;
this.width = width;
this.height = height;
}
/**
* {@inheritDoc}
*/
@Override
public void run() {
if (!processing.compareAndSet(false, true)) return;
// Log.d(TAG, "BEGIN PROCESSING...");
try {
// Previous frame
int[] pre = null;
if (Preferences.SAVE_PREVIOUS) pre = detector.getPrevious();
// Current frame (with changes)
// long bConversion = System.currentTimeMillis();
int[] img = null;
if (Preferences.USE_RGB) {
img = ImageProcessing.decodeYUV420SPtoRGB(data, width, height);
if (img != null && detector.detect(img, width, height))
{
if(song!=null && !song.isPlaying())
{
song.start();
mVibrator.vibrate(50);
}
}
else
{
if(song!=null && song.isPlaying())
{
song.pause();
}
}
}
// Current frame (without changes)
int[] org = null;
if (Preferences.SAVE_ORIGINAL && img != null) org = img.clone();
if (img != null && detector.detect(img, width, height)) {
// The delay is necessary to avoid taking a picture while in
// the
// middle of taking another. This problem can causes some
// phones
// to reboot.
long now = System.currentTimeMillis();
if (now > (mReferenceTime + Preferences.PICTURE_DELAY)) {
mReferenceTime = now;
Bitmap previous = null;
if (Preferences.SAVE_PREVIOUS && pre != null) {
if (Preferences.USE_RGB) previous = ImageProcessing.rgbToBitmap(pre, width, height);
else previous = ImageProcessing.lumaToGreyscale(pre, width, height);
}
Bitmap original = null;
if (Preferences.SAVE_ORIGINAL && org != null) {
if (Preferences.USE_RGB) original = ImageProcessing.rgbToBitmap(org, width, height);
else original = ImageProcessing.lumaToGreyscale(org, width, height);
}
Bitmap bitmap = null;
if (Preferences.SAVE_CHANGES) {
if (Preferences.USE_RGB) bitmap = ImageProcessing.rgbToBitmap(img, width, height);
else bitmap = ImageProcessing.lumaToGreyscale(img, width, height);
}
Log.i(TAG, "Saving.. previous=" + previous + " original=" + original + " bitmap=" + bitmap);
Looper.prepare();
new SavePhotoTask().execute(previous, original, bitmap);
} else {
Log.i(TAG, "Not taking picture because not enough time has passed since the creation of the Surface");
}
}
} catch (Exception e) {
e.printStackTrace();
} finally {
processing.set(false);
}
// Log.d(TAG, "END PROCESSING...");
processing.set(false);
}
};
private static final class SavePhotoTask extends AsyncTask<Bitmap, Integer, Integer> {
/**
* {@inheritDoc}
*/
@Override
protected Integer doInBackground(Bitmap... data) {
for (int i = 0; i < data.length; i++) {
Bitmap bitmap = data[i];
String name = "MotDet_"+String.valueOf(System.currentTimeMillis());
if (bitmap != null) createDirectoryAndSaveFile(name, bitmap);
}
return 1;
}
private void createDirectoryAndSaveFile(String name, Bitmap bitmap) {
File folder = new File(Environment.getExternalStorageDirectory() +
File.separator + "MD");//here you have created different name
boolean success = true;
if (!folder.exists()) {
success = folder.mkdirs();
}
if (success) {
// Do something on success
} else {
// Do something else on failure
}
File photo = new File(folder.getAbsolutePath(), name+ ".jpg"); //use path of above created folder
if (photo.exists()) {
photo.delete();
}
try {
FileOutputStream out = new FileOutputStream(photo.getPath());
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, out);
out.flush();
out.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
HomeActivity
public class HomeActivity extends AppCompatActivity {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.home_layout);
Button bt1 = (Button) findViewById(R.id.button);
Button bt2= (Button)findViewById(R.id.button1);
bt1.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent i = new Intent(view.getContext(), MotionDetectionActivity.class);
startActivity(i);
}
});
bt2.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View vew) {
Intent b=new Intent(vew.getContext(),SettingsActivity.class);
startActivity(b);
}
});
}
}
Stacktrace
Handle it like this and see if it still crashes:
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
if(camera != null) {
Camera.Parameters parameters = camera.getParameters();
Camera.Size size = getBestPreviewSize(width, height, parameters);
if (size != null) {
parameters.setPreviewSize(size.width, size.height);
Log.d(TAG, "Using width=" + size.width + " height=" + size.height);
}
camera.setParameters(parameters);
camera.startPreview();
inPreview = true;
}
}
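The same guard likely belongs in onPause(): if the activity is left before the camera was successfully opened, or after it has already been released, camera is null there and camera.setPreviewCallback(null) throws a NullPointerException, which would also crash on a back press. A hedged sketch along the same lines, based on the onPause() posted above:

@Override
public void onPause() {
    super.onPause();
    if (song != null && song.isPlaying()) {
        song.stop();
    }
    if (camera != null) { // camera may never have been opened, or was already released
        camera.setPreviewCallback(null);
        if (inPreview) camera.stopPreview();
        camera.release();
        camera = null;
    }
    inPreview = false;
}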

Logcat error: can't find the error

This is my main activity; it is the code for a camera filter app. I can't spot any error myself, but when I run this code it crashes on my real device. Can anyone help me find where the error is and what it belongs to? I am attaching the logcat error as a picture.
public class MainActivity extends AppCompatActivity {
private static final int REQUEST_CAMERA_PERMISSION = 101;
private CameraRenderer renderer;
private TextureView textureView;
private int filterId = R.id.filter0;
/**
* ATTENTION: This was auto-generated to implement the App Indexing API.
* See https://g.co/AppIndexing/AndroidStudio for more information.
*/
private GoogleApiClient client;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setTitle("Original");
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager
.PERMISSION_GRANTED) {
if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) {
Toast.makeText(this, "Camera acess is required.", Toast.LENGTH_SHORT).show();
} else {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERSIMMISON);
}
} else {
setupCameraPreviewView();
}
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
client = new GoogleApiClient.Builder(this).addApi(AppIndex.API).build();
}
void setupCameraPreviewView() {
renderer = new CameraRenderer(this);
textureView = (TextureView) findViewById(R.id.textureView);
assert textureView != null;
textureView.setSurfaceTextureListener(renderer);
textureView.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
renderer.setSelectedFilter(R.id.filter0);
break;
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_CANCEL:
renderer.setSelectedFilter(filterId);
break;
}
return true;
}
});
textureView.addOnLayoutChangeListener(new View.OnLayoutChangeListener() {
@Override
public void onLayoutChange(View v, int left, int top, int right, int bottom, int oldLeft, int oldTop, int oldRight, int oldBottom) {
renderer.onSurfaceTextureSizeChanged(null, v.getWidth(), v.getHeight());
}
});
}
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.filter, menu);
return true;
}
public boolean onOptionsItemSelected(MenuItem item) {
filterId = item.getItemId();
if (filterId == R.id.capture) {
Toast.makeText(this, capture() ? "The capture has been saved to your sdcard root path." : "Saved failed", Toast.LENGTH_SHORT).show();
return true;
}
setTitle(item.getTitle());
if (renderer != null)
renderer.setSelectedFilter(filterId);
return true;
}
private boolean capture() {
String mPath = genSaveFileName(getTitle().toString() + "_", ".png");
File imageFile = new File(mPath);
if (imageFile.exists()) {
imageFile.delete();
}
Bitmap bitmap = textureView.getBitmap();
OutputStream outputStream = null;
try {
outputStream = new FileOutputStream(imageFile);
bitmap.compress(Bitmap.CompressFormat.PNG, 90, outputStream);
outputStream.flush();
outputStream.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
return false;
} catch (IOException e) {
e.printStackTrace();
return false;
}
return true;
}
private String genSaveFileName(String prefix, String suffix) {
Date date = new Date();
SimpleDateFormat dateFormat1 = new SimpleDateFormat("yyyyMMdd_hhmmss");
String timeString = dateFormat1.format(date);
String externalPath = Environment.getExternalStorageDirectory().toString();
return externalPath + "/" + prefix + timeString + suffix;
}
@Override
public void onStart() {
super.onStart();
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
client.connect();
Action viewAction = Action.newAction(
Action.TYPE_VIEW, // TODO: choose an action type.
"Main Page", // TODO: Define a title for the content shown.
// TODO: If you have web page content that matches this app activity's content,
// make sure this auto-generated web page URL is correct.
// Otherwise, set the URL to null.
Uri.parse("http://host/path"),
// TODO: Make sure this auto-generated app URL is correct.
Uri.parse("android-app://giri.com.camerafilter/http/host/path")
);
AppIndex.AppIndexApi.start(client, viewAction);
}
@Override
public void onStop() {
super.onStop();
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
Action viewAction = Action.newAction(
Action.TYPE_VIEW, // TODO: choose an action type.
"Main Page", // TODO: Define a title for the content shown.
// TODO: If you have web page content that matches this app activity's content,
// make sure this auto-generated web page URL is correct.
// Otherwise, set the URL to null.
Uri.parse("http://host/path"),
// TODO: Make sure this auto-generated app URL is correct.
Uri.parse("android-app://giri.com.camerafilter/http/host/path")
);
AppIndex.AppIndexApi.end(client, viewAction);
client.disconnect();
}
}
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
>
<TextView
android:id="#+id/textureView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
/>
</RelativeLayout>
This is my CameraRenderer.java
public class CameraRenderer extends Thread implements TextureView.SurfaceTextureListener {
private static final String TAG = "CameraRenderer";
private static final int EGL_OPENGL_ES2_BIT = 4;
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
private static final int DRAW_INTERVAL = 1000 / 30;
private Context context;
private SurfaceTexture surfaceTexture;
private int gwidth, gheight;
private EGLDisplay eglDisplay;
private EGLSurface eglSurface;
private EGLContext eglContext;
private EGL10 egl10;
private Camera camera;
private SurfaceTexture cameraSurfaceTexture;
private int cameraTextureId;
private CameraFilter selectedFilter;
private SparseArray<CameraFilter> cameraFilterMap = new SparseArray<>();
public CameraRenderer(Context context) {
this.context = context;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
GLES20.glViewport(0, 0, gwidth = width, gheight = height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
if (camera != null) {
camera.stopPreview();
camera.release();
}
interrupt();
CameraFilter.release();
return true;
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
if (isAlive()) {
interrupt();
}
surfaceTexture = surface;
GLES20.glViewport(0, 0, gwidth = width, gheight = height);
// Open camera
Pair<Camera.CameraInfo, Integer> backCamera = getBackCamera();
final int backCameraId = backCamera.second;
camera = Camera.open(backCameraId);
// Start rendering
start();
}
public void setSelectedFilter(int id) {
selectedFilter = cameraFilterMap.get(id);
selectedFilter.onAttach();
}
@Override
public void run() {
initGL(surfaceTexture);
// Setup camera filters map
cameraFilterMap.append(R.id.filter0, new OriginalFilter(context));
cameraFilterMap.append(R.id.filter1, new EdgeDetectionFilter(context));
cameraFilterMap.append(R.id.filter2, new PixelizeFilter(context));
cameraFilterMap.append(R.id.filter3, new EMInterferenceFilter(context));
cameraFilterMap.append(R.id.filter4, new TrianglesMosaicFilter(context));
cameraFilterMap.append(R.id.filter5, new LegofiedFilter(context));
cameraFilterMap.append(R.id.filter6, new TileMosaicFilter(context));
cameraFilterMap.append(R.id.filter7, new BlueorangeFilter(context));
cameraFilterMap.append(R.id.filter8, new ChromaticAberrationFilter(context));
cameraFilterMap.append(R.id.filter9, new BasicDeformFilter(context));
cameraFilterMap.append(R.id.filter10, new ContrastFilter(context));
cameraFilterMap.append(R.id.filter11, new NoiseWarpFilter(context));
cameraFilterMap.append(R.id.filter12, new RefractionFilter(context));
cameraFilterMap.append(R.id.filter13, new MappingFilter(context));
cameraFilterMap.append(R.id.filter14, new CrosshatchFilter(context));
cameraFilterMap.append(R.id.filter15, new LichtensteinEsqueFilter(context));
cameraFilterMap.append(R.id.filter16, new AsciiArtFilter(context));
cameraFilterMap.append(R.id.filter17, new MoneyFilter(context));
cameraFilterMap.append(R.id.filter18, new CrackedFilter(context));
cameraFilterMap.append(R.id.filter19, new PolygonizationFilter(context));
cameraFilterMap.append(R.id.filter20, new JFAVoronoiFilter(context));
setSelectedFilter(R.id.filter0);
// Create texture for camera preview
cameraTextureId = MyGLUtils.genTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
cameraSurfaceTexture = new SurfaceTexture(cameraTextureId);
// Start camera preview
try {
camera.setPreviewTexture(cameraSurfaceTexture);
camera.startPreview();
} catch (IOException ioe) {
// Something bad happened
}
// Render loop
while (!Thread.currentThread().isInterrupted()) {
try {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Update the camera preview texture
synchronized (this) {
cameraSurfaceTexture.updateTexImage();
}
// Draw camera preview
selectedFilter.draw(cameraTextureId, gwidth, gheight);
// Flush
GLES20.glFlush();
egl10.eglSwapBuffers(eglDisplay, eglSurface);
Thread.sleep(DRAW_INTERVAL);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
cameraSurfaceTexture.release();
GLES20.glDeleteTextures(1, new int[]{cameraTextureId}, 0);
}
private void initGL(SurfaceTexture texture) {
egl10 = (EGL10) EGLContext.getEGL();
eglDisplay = egl10.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
throw new RuntimeException("eglGetDisplay failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
}
int[] version = new int[2];
if (!egl10.eglInitialize(eglDisplay, version)) {
throw new RuntimeException("eglInitialize failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
}
int[] configsCount = new int[1];
EGLConfig[] configs = new EGLConfig[1];
int[] configSpec = {
EGL10.EGL_RENDERABLE_TYPE,
EGL_OPENGL_ES2_BIT,
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_ALPHA_SIZE, 8,
EGL10.EGL_DEPTH_SIZE, 0,
EGL10.EGL_STENCIL_SIZE, 0,
EGL10.EGL_NONE
};
EGLConfig eglConfig = null;
if (!egl10.eglChooseConfig(eglDisplay, configSpec, configs, 1, configsCount)) {
throw new IllegalArgumentException("eglChooseConfig failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
} else if (configsCount[0] > 0) {
eglConfig = configs[0];
}
if (eglConfig == null) {
throw new RuntimeException("eglConfig not initialized");
}
int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
eglContext = egl10.eglCreateContext(eglDisplay, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
eglSurface = egl10.eglCreateWindowSurface(eglDisplay, eglConfig, texture, null);
if (eglSurface == null || eglSurface == EGL10.EGL_NO_SURFACE) {
int error = egl10.eglGetError();
if (error == EGL10.EGL_BAD_NATIVE_WINDOW) {
Log.e(TAG, "eglCreateWindowSurface returned EGL10.EGL_BAD_NATIVE_WINDOW");
return;
}
throw new RuntimeException("eglCreateWindowSurface failed " + android.opengl.GLUtils.getEGLErrorString(error));
}
if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
throw new RuntimeException("eglMakeCurrent failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
}
}
private Pair<Camera.CameraInfo, Integer> getBackCamera() {
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
final int numberOfCameras = Camera.getNumberOfCameras();
for (int i = 0; i < numberOfCameras; ++i) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
return new Pair<>(cameraInfo, i);
}
}
return null;
}
}
This is my CameraFilter.java file
public abstract class CameraFilter {
static final float SQUARE_COORDS[] = {
1.0f, -1.0f,
-1.0f, -1.0f,
1.0f, 1.0f,
-1.0f, 1.0f,
};
static final float TEXTURE_COORDS[] = {
1.0f, 0.0f,
0.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f,
};
static FloatBuffer VERTEX_BUF, TEXTURE_COORD_BUF;
static int PROGRAM = 0;
private static final int BUF_ACTIVE_TEX_UNIT = GLES20.GL_TEXTURE8;
private static RenderBuffer CAMERA_RENDER_BUF;
private static final float ROATED_TEXTURE_COORDS[] = {
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 0.0f,
0.0f, 1.0f,
};
private static FloatBuffer ROATED_TEXTURE_COORD_BUF;
final long START_TIME = System.currentTimeMillis();
int iFrame = 0;
public CameraFilter(Context context) {
// Setup default Buffers
if (VERTEX_BUF == null) {
VERTEX_BUF = ByteBuffer.allocateDirect(SQUARE_COORDS.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
VERTEX_BUF.put(SQUARE_COORDS);
VERTEX_BUF.position(0);
}
if (TEXTURE_COORD_BUF == null) {
TEXTURE_COORD_BUF = ByteBuffer.allocateDirect(TEXTURE_COORDS.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
TEXTURE_COORD_BUF.put(TEXTURE_COORDS);
TEXTURE_COORD_BUF.position(0);
}
if (ROATED_TEXTURE_COORD_BUF == null) {
ROATED_TEXTURE_COORD_BUF = ByteBuffer.allocateDirect(ROATED_TEXTURE_COORDS.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
ROATED_TEXTURE_COORD_BUF.put(ROATED_TEXTURE_COORDS);
ROATED_TEXTURE_COORD_BUF.position(0);
}
if (PROGRAM == 0) {
PROGRAM = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.original_rtt);
}
}
@CallSuper
public void onAttach() {
iFrame = 0;
}
final public void draw(int cameraTexId, int canvasWidth, int canvasHeight) {
// TODO move?
// Create camera render buffer
if (CAMERA_RENDER_BUF == null ||
CAMERA_RENDER_BUF.getWidth() != canvasWidth ||
CAMERA_RENDER_BUF.getHeight() != canvasHeight) {
CAMERA_RENDER_BUF = new RenderBuffer(canvasWidth, canvasHeight, BUF_ACTIVE_TEX_UNIT);
}
// Use shaders
GLES20.glUseProgram(PROGRAM);
int iChannel0Location = GLES20.glGetUniformLocation(PROGRAM, "iChannel0");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexId);
GLES20.glUniform1i(iChannel0Location, 0);
int vPositionLocation = GLES20.glGetAttribLocation(PROGRAM, "vPosition");
GLES20.glEnableVertexAttribArray(vPositionLocation);
GLES20.glVertexAttribPointer(vPositionLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, VERTEX_BUF);
int vTexCoordLocation = GLES20.glGetAttribLocation(PROGRAM, "vTexCoord");
GLES20.glEnableVertexAttribArray(vTexCoordLocation);
GLES20.glVertexAttribPointer(vTexCoordLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, ROATED_TEXTURE_COORD_BUF);
// Render to texture
CAMERA_RENDER_BUF.bind();
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
CAMERA_RENDER_BUF.unbind();
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
onDraw(CAMERA_RENDER_BUF.getTexId(), canvasWidth, canvasHeight);
iFrame ++;
}
abstract void onDraw(int cameraTexId, int canvasWidth, int canvasHeight);
void setupShaderInputs(int program, int[] iResolution, int[] iChannels, int[][] iChannelResolutions) {
setupShaderInputs(program, VERTEX_BUF, TEXTURE_COORD_BUF, iResolution, iChannels, iChannelResolutions);
}
void setupShaderInputs(int program, FloatBuffer vertex, FloatBuffer textureCoord, int[] iResolution, int[] iChannels, int[][] iChannelResolutions) {
GLES20.glUseProgram(program);
int iResolutionLocation = GLES20.glGetUniformLocation(program, "iResolution");
GLES20.glUniform3fv(iResolutionLocation, 1,
FloatBuffer.wrap(new float[]{(float) iResolution[0], (float) iResolution[1], 1.0f}));
float time = ((float) (System.currentTimeMillis() - START_TIME)) / 1000.0f;
int iGlobalTimeLocation = GLES20.glGetUniformLocation(program, "iGlobalTime");
GLES20.glUniform1f(iGlobalTimeLocation, time);
int iFrameLocation = GLES20.glGetUniformLocation(program, "iFrame");
GLES20.glUniform1i(iFrameLocation, iFrame);
int vPositionLocation = GLES20.glGetAttribLocation(program, "vPosition");
GLES20.glEnableVertexAttribArray(vPositionLocation);
GLES20.glVertexAttribPointer(vPositionLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, vertex);
int vTexCoordLocation = GLES20.glGetAttribLocation(program, "vTexCoord");
GLES20.glEnableVertexAttribArray(vTexCoordLocation);
GLES20.glVertexAttribPointer(vTexCoordLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, textureCoord);
for (int i = 0; i < iChannels.length; i ++) {
int sTextureLocation = GLES20.glGetUniformLocation(program, "iChannel" + i);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, iChannels[i]);
GLES20.glUniform1i(sTextureLocation, i);
}
float _iChannelResolutions[] = new float[iChannelResolutions.length * 3];
for (int i = 0; i < iChannelResolutions.length; i++) {
_iChannelResolutions[i*3] = iChannelResolutions[i][0];
_iChannelResolutions[i*3 + 1] = iChannelResolutions[i][1];
_iChannelResolutions[i*3 + 2] = 1.0f;
}
int iChannelResolutionLocation = GLES20.glGetUniformLocation(program, "iChannelResolution");
GLES20.glUniform3fv(iChannelResolutionLocation,
_iChannelResolutions.length, FloatBuffer.wrap(_iChannelResolutions));
}
public static void release() {
PROGRAM = 0;
CAMERA_RENDER_BUF = null;
}
}
Still getting an error
E/AndroidRuntime: FATAL EXCEPTION: Thread-1759 Process: giri.com.camerafilter, PID: 21244
java.lang.NullPointerException: Attempt to invoke virtual method 'void giri.com.camerafilter.RenderBuffer.unbind()' on a null object reference
at giri.com.camerafilter.filter.CameraFilter.draw(CameraFilter.java:126)
at giri.com.camerafilter.CameraRenderer.run(CameraRenderer.java:165)
You are getting a ClassCastException because you're trying to cast a TextView to a TextureView. Change:
<TextView
android:id="#+id/textureView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
/>
to <TextureView...>:
<TextureView
android:id="#+id/textureView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
/>
There is a ClassCastException, which means you are casting one class to another class with a different signature. You have used an AppCompatTextView in your XML and you are casting it to a TextView.
You might be doing this:
AppCompatTextView textView = (TextView) findViewById(R.id.your_id);
and you should do it as follows:
AppCompatTextView textView = (AppCompatTextView) findViewById(R.id.your_id);
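Either way, a quick instanceof check before the cast makes this kind of mismatch show up in the log instead of crashing. A small sketch (the literal log tag is an arbitrary choice, since MainActivity defines no TAG constant):

View v = findViewById(R.id.textureView);
Log.d("MainActivity", "textureView is actually a " + v.getClass().getName());
if (v instanceof TextureView) {
    textureView = (TextureView) v; // safe: the layout really declares a TextureView
    textureView.setSurfaceTextureListener(renderer);
} else {
    Log.e("MainActivity", "Expected a TextureView but found " + v.getClass().getSimpleName());
}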

ImageView still visible after setVisibility(View.INVISIBLE)

I am trying to make a camera app that shows an overlay when a picture is taken.
When this overlay is shown, the other UI components (except for the FrameLayout that shows the picture) should become invisible.
But while my two ImageButtons go invisible, my ImageView (ivCompass) doesn't.
Here is the code that gets called when a picture is taken
Camera.PictureCallback mPicture = new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
//create a new intent...
String path = createFile(data);
intent = new Intent();
intent.putExtra("path", path);
mBearingProvider.updateBearing();
bearing = mBearingProvider.getBearing();
cardinalDirection = bearingToString(bearing);
//((TextView) findViewById(R.id.tvPicDirection)).setText(cardinalDirection);
Log.e("Direction", cardinalDirection + "," + bearing);
findViewById(R.id.btnFlash).setVisibility(View.INVISIBLE);
findViewById(R.id.btnCapture).setVisibility(View.INVISIBLE);
findViewById(R.id.ivCompass).setVisibility(View.INVISIBLE);
findViewById(R.id.pictureOverlay).setVisibility(View.VISIBLE);
}
};
And here is the layout file
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<FrameLayout
android:id="#+id/camera_preview"
android:layout_width="match_parent"
android:layout_height="match_parent">
</FrameLayout>
<ImageButton
android:id="#+id/btnFlash"
android:layout_width="50dp"
android:layout_height="50dp"
android:layout_centerVertical="true"
android:layout_margin="10dp"
android:src="#drawable/camera_flash_on"
android:background="#drawable/circle_flash"
android:onClick="changeFlashMode"/>
<ImageButton
android:id="#+id/btnCapture"
android:layout_width="50dp"
android:layout_height="50dp"
android:layout_alignParentRight="true"
android:layout_centerVertical="true"
android:layout_margin="10dp"
android:src="#drawable/icon_camera"
android:background="#drawable/circle_camera"/>
<ImageView
android:id="#+id/ivCompass"
android:layout_width="100dp"
android:layout_height="100dp"
android:layout_alignParentRight="true"
android:src="#drawable/camera_compass"
android:background="#android:color/transparent"/>
<RelativeLayout
android:id="#+id/pictureOverlay"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_margin="20dp"
android:background="#color/alphaBlack"
android:visibility="invisible">
</RelativeLayout>
I think it's just a mistake with naming, syntax or something like that, but I can't seem to find it.
EDIT:
Here is the entire Activity
public class CameraActivity extends AppCompatActivity implements BearingToNorthProvider.ChangeEventListener {
private Camera mCamera;
private CameraView mCameraView;
private float mDist = 0f;
private String flashMode;
private ImageButton flashButton;
private Intent intent;
private BearingToNorthProvider mBearingProvider;
private double bearing;
private double currentBearing = 0d;
private String cardinalDirection = "?";
private final int REQUEST_CODE_ASK_PERMISSIONS = 2;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
mCamera = getCameraInstance();
mCameraView = new CameraView(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.addView(mCameraView);
ImageButton captureButton = (ImageButton) findViewById(R.id.btnCapture);
captureButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Camera.Parameters params = mCamera.getParameters();
params.setFlashMode(flashMode);
mCamera.setParameters(params);
mCamera.takePicture(null, null, mPicture);
}
});
SharedPreferences sharedPref = this.getSharedPreferences(getString(R.string.apiKey), Context.MODE_PRIVATE);
flashMode = sharedPref.getString(getString(R.string.flashMode), Camera.Parameters.FLASH_MODE_OFF);
flashButton = (ImageButton) findViewById(R.id.btnFlash);
setFlashButton();
mBearingProvider = new BearingToNorthProvider(this,this);
mBearingProvider.setChangeEventListener(this);
mBearingProvider.start();
}
@Override
protected void onPause() {
super.onPause();
mBearingProvider.stop();
}
/**
* Helper method to access the camera; returns null if it cannot get the
* camera or the camera does not exist
*
* @return the instance of the camera
*/
private Camera getCameraInstance() {
Camera camera = null;
try {
camera = Camera.open();
} catch (Exception e) {
Log.e("CamException", e.toString());
}
return camera;
}
Camera.PictureCallback mPicture = new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
//create a new intent...
String path = createFile(data);
intent = new Intent();
intent.putExtra("path", path);
mBearingProvider.updateBearing();
bearing = mBearingProvider.getBearing();
cardinalDirection = bearingToString(bearing);
//((TextView) findViewById(R.id.tvPicDirection)).setText(cardinalDirection);
Log.e("Direction", cardinalDirection + "," + bearing);
findViewById(R.id.btnFlash).setVisibility(View.INVISIBLE);
findViewById(R.id.btnCapture).setVisibility(View.INVISIBLE);
findViewById(R.id.ivCompass).setVisibility(View.INVISIBLE);
findViewById(R.id.pictureOverlay).setVisibility(View.VISIBLE);
}
};
private void confirmPicture(View v) {
/*String direction = String.valueOf(((TextView) findViewById(R.id.tvPicDirection)).getText());
String description = String.valueOf(((EditText) findViewById(R.id.tvPicDescription)).getText());
intent.putExtra("direction", direction);
intent.putExtra("description", description);*/
//close this Activity...
setResult(Activity.RESULT_OK, intent);
finish();
}
//region File Methods
/**
* Method that creates a file from the given byte array and saves the file in the Pictures Directory
* @param data is the array of bytes that represent the picture taken by the camera
* @return the path of created file
*/
private String createFile(byte[] data){
checkFilePermissions();
File picFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES) + File.separator + "tempPic.jpg" + File.separator);
String path = picFile.getPath();
try {
BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(picFile));
bos.write(data);
bos.flush();
bos.close();
return path;
} catch (IOException e) {
e.printStackTrace();
return "";
}
}
/**
* Checks the permission for reading to and writing from the external storage
*/
private void checkFilePermissions() {
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.M) {
int hasWriteExternalStoragePermission = checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE);
if (hasWriteExternalStoragePermission != PackageManager.PERMISSION_GRANTED) {
requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},
REQUEST_CODE_ASK_PERMISSIONS);
return;
}
}
}
//endregion
//region Zoom Methods
@Override
public boolean onTouchEvent(MotionEvent event) {
// Get the pointer ID
Camera.Parameters params = mCamera.getParameters();
int action = event.getAction();
if (event.getPointerCount() > 1) {
// handle multi-touch events
if (action == MotionEvent.ACTION_POINTER_DOWN) {
mDist = getFingerSpacing(event);
} else if (action == MotionEvent.ACTION_MOVE && params.isZoomSupported()) {
mCamera.cancelAutoFocus();
handleZoom(event, params);
}
} else {
// handle single touch events
if (action == MotionEvent.ACTION_UP) {
handleFocus(event, params);
}
}
return true;
}
private void handleZoom(MotionEvent event, Camera.Parameters params) {
int maxZoom = params.getMaxZoom();
int zoom = params.getZoom();
float newDist = getFingerSpacing(event);
if (newDist > mDist) {
//zoom in
if (zoom < maxZoom)
zoom++;
} else if (newDist < mDist) {
//zoom out
if (zoom > 0)
zoom--;
}
mDist = newDist;
params.setZoom(zoom);
mCamera.setParameters(params);
}
public void handleFocus(MotionEvent event, Camera.Parameters params) {
int pointerId = event.getPointerId(0);
int pointerIndex = event.findPointerIndex(pointerId);
// Get the pointer's current position
float x = event.getX(pointerIndex);
float y = event.getY(pointerIndex);
List<String> supportedFocusModes = params.getSupportedFocusModes();
if (supportedFocusModes != null && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
mCamera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean b, Camera camera) {
// currently set to auto-focus on single touch
}
});
}
}
/** Determine the space between the first two fingers */
private float getFingerSpacing(MotionEvent event) {
double x = event.getX(0) - event.getX(1);
double y = event.getY(0) - event.getY(1);
return (float) Math.sqrt(x * x + y * y);
}
//endregion
//region Flash Methods
public void changeFlashMode(View v) {
switch (flashMode) {
case Camera.Parameters.FLASH_MODE_ON :
flashMode = Camera.Parameters.FLASH_MODE_AUTO;
break;
case Camera.Parameters.FLASH_MODE_AUTO :
flashMode = Camera.Parameters.FLASH_MODE_OFF;
break;
case Camera.Parameters.FLASH_MODE_OFF :
flashMode = Camera.Parameters.FLASH_MODE_ON;
break;
}
SharedPreferences sharedPref = getSharedPreferences(getString(R.string.flashMode), Context.MODE_PRIVATE);
SharedPreferences.Editor editor = sharedPref.edit();
editor.putString(getString(R.string.flashMode), flashMode);
editor.commit();
setFlashButton();
}
public void setFlashButton() {
switch (flashMode) {
case Camera.Parameters.FLASH_MODE_ON :
flashButton.setImageDrawable(getResources().getDrawable(R.drawable.camera_flash_on));
break;
case Camera.Parameters.FLASH_MODE_AUTO :
flashButton.setImageDrawable(getResources().getDrawable(R.drawable.camera_flash_auto));
break;
case Camera.Parameters.FLASH_MODE_OFF :
flashButton.setImageDrawable(getResources().getDrawable(R.drawable.camera_flash_off));
break;
}
}
//endregion
//region Bearing Methods
/**
* Method that gives a cardinal direction based on the current bearing to the true north
* @param bearing is the bearing to the true north
* @return cardinal direction that belongs to the bearing
*/
private String bearingToString(Double bearing) {
String strHeading = "?";
if (isBetween(bearing,-180.0,-157.5)) { strHeading = "South"; }
else if (isBetween(bearing,-157.5,-112.5)) { strHeading = "SouthWest"; }
else if (isBetween(bearing,-112.5,-67.5)) { strHeading = "West"; }
else if (isBetween(bearing,-67.5,-22.5)) { strHeading = "NorthWest"; }
else if (isBetween(bearing,-22.5,22.5)) { strHeading = "North"; }
else if (isBetween(bearing,22.5,67.5)) { strHeading = "NorthEast"; }
else if (isBetween(bearing,67.5,112.5)) { strHeading = "East"; }
else if (isBetween(bearing,112.5,157.5)) { strHeading = "SouthEast"; }
else if (isBetween(bearing,157.5,180.0)) { strHeading = "South"; }
return strHeading;
}
/**
* Method that checks if a certain number is in a certain range of numbers
* @param x is the number to check
* @param lower is the number that defines the lower boundary of the number range
* @param upper is the number that defines the upper boundary of the number range
* @return true if the number is between the other numbers, false otherwise
*/
private boolean isBetween(double x, double lower, double upper) {
return lower <= x && x <= upper;
}
/*
Method that triggers when the bearing changes, it sets the current bearing and sends an updated context to the provider
*/
@Override
public void onBearingChanged(double bearing) {
this.bearing = bearing;
mBearingProvider.setContext(this);
ImageView image = (ImageView) findViewById(R.id.ivCompass);
// create a rotation animation (reverse turn degree degrees)
if (bearing < 0) {
bearing += 360;
}
RotateAnimation ra = new RotateAnimation((float)currentBearing,(float)-bearing, Animation.RELATIVE_TO_SELF, 0.5f,Animation.RELATIVE_TO_SELF,0.5f);
// how long the animation will take place
ra.setDuration(210);
// set the animation after the end of the reservation status
ra.setFillAfter(true);
// Start the animation
image.startAnimation(ra);
currentBearing = -bearing;
mBearingProvider.setContext(this);
}
//endregion
}
EDIT 2:
I have made a small change to the onBearingChanged method. The compass is still visible on screen, but it reports itself as invisible, so with my new if statement it no longer animates:
@Override
public void onBearingChanged(double bearing) {
this.bearing = bearing;
mBearingProvider.setContext(this);
ImageView image = (ImageView) findViewById(R.id.ivCompass);
if (image.getVisibility() == View.VISIBLE) {
// create a rotation animation (reverse turn degree degrees)
if (bearing < 0) {
bearing += 360;
}
RotateAnimation ra = new RotateAnimation((float) currentBearing, (float) -bearing, Animation.RELATIVE_TO_SELF, 0.5f, Animation.RELATIVE_TO_SELF, 0.5f);
// how long the animation will take place
ra.setDuration(210);
// set the animation after the end of the reservation status
ra.setFillAfter(true);
// Start the animation
image.startAnimation(ra);
currentBearing = -bearing;
}
mBearingProvider.setContext(this);
}
If you have some kind of animation running, it probably interferes with the visibility change. Try this just before setting the view to INVISIBLE:
findViewById(R.id.ivCompass).clearAnimation();
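Applied to the onPictureTaken() callback above, that would look something like this sketch. The RotateAnimation started in onBearingChanged() uses setFillAfter(true), so it keeps drawing the view until it is cleared:

findViewById(R.id.btnFlash).setVisibility(View.INVISIBLE);
findViewById(R.id.btnCapture).setVisibility(View.INVISIBLE);
ImageView compass = (ImageView) findViewById(R.id.ivCompass);
compass.clearAnimation(); // stop the fill-after rotation that keeps the compass drawn
compass.setVisibility(View.INVISIBLE);
findViewById(R.id.pictureOverlay).setVisibility(View.VISIBLE);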

Implementing AsyncTask: An error occurred while executing doInBackground(), throws a RuntimeException

I am trying to implement an AsyncTask to inform the user that a background operation is running when a photo is taken. The AsyncTask manages to display the dialog message set with setMessage(), but further along the app crashes and displays the following error:
Caused by: android.view.ViewRootImpl$CalledFromWrongThreadException: Only the original thread that created a view hierarchy can touch its views.
This is the complete codebase of my AsyncTask
private class CheckTypesTask extends AsyncTask<Void, Void, Void>{
ProgressDialog asyncDialog = new ProgressDialog(MainActivity.this);
@Override
protected void onPreExecute() {
//set message of the dialog
asyncDialog.setMessage("Loading");
//show dialog
asyncDialog.show();
super.onPreExecute();
}
@Override
protected Void doInBackground(Void... arg0) {
//don't touch dialog here it'll break the application
//do some lengthy stuff like calling login webservice
onPhotoTaken();
return null;
}
@Override
protected void onPostExecute(Void result) {
//hide the dialog
asyncDialog.dismiss();
super.onPostExecute(result);
}
}
Then I am calling the AsyncTask in onActivityResult() this way:
if (resultCode == Activity.RESULT_OK) {
//onPhotoTaken();
(new MainActivity.CheckTypesTask()).execute();
} else {
Log.v(TAG, "User cancelled");
}
}
EDITED:
This is the onPhotoTaken() code; it is defined in the MainActivity class.
public void onPhotoTaken() {
_taken = true;
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 4;
Bitmap bitmap = BitmapFactory.decodeFile(_path, options);
try {
//ProgressDialog dialog = ProgressDialog.show(this, "Loading", "Please wait...", true);
ExifInterface exif = new ExifInterface(_path);
int exifOrientation = exif.getAttributeInt(
ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
Log.v(TAG, "Orient: " + exifOrientation);
int rotate = 0;
switch (exifOrientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
rotate = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
rotate = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
rotate = 270;
break;
}
Log.v(TAG, "Rotation: " + rotate);
if (rotate != 0) {
// Getting width & height of the given image.
int w = bitmap.getWidth();
int h = bitmap.getHeight();
// Setting pre rotate
Matrix mtx = new Matrix();
mtx.preRotate(rotate);
// Rotating Bitmap
bitmap = Bitmap.createBitmap(bitmap, 0, 0, w, h, mtx, false);
}
// Convert to ARGB_8888, required by tess
bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
//dialog.dismiss();
} catch (IOException e) {
Log.e(TAG, "Couldn't correct orientation: " + e.toString());
}
}
What could be wrong, please?
You are trying to access UI components (Views) from a background thread, in your case inside the doInBackground() method. You are not allowed to do that.
Call your function from onPostExecute() instead.
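For instance, CheckTypesTask could be restructured so the background step only produces a Bitmap and all view work happens in onPostExecute(). In this sketch, decodePhoto() and displayPhoto() are hypothetical halves of the original onPhotoTaken(), split so that only the UI half touches views:

private class CheckTypesTask extends AsyncTask<Void, Void, Bitmap> {
    ProgressDialog asyncDialog = new ProgressDialog(MainActivity.this);

    @Override
    protected void onPreExecute() {
        asyncDialog.setMessage("Loading");
        asyncDialog.show();
        super.onPreExecute();
    }

    @Override
    protected Bitmap doInBackground(Void... arg0) {
        // Heavy work only: decode and rotate the photo. No View access here.
        return decodePhoto(); // hypothetical: the decoding part of onPhotoTaken()
    }

    @Override
    protected void onPostExecute(Bitmap bitmap) {
        asyncDialog.dismiss();
        displayPhoto(bitmap); // hypothetical: the view-updating part, safe on the main thread
        super.onPostExecute(bitmap);
    }
}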
Try this code.
This is your layout, which contains a trigger for image capture and an ImageView to display the captured image:
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:fillViewport="true"
android:orientation="vertical"
tools:context="com.serveroverload.cube.ui.HomeActivity" >
<ImageView
android:id="#+id/preview"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_above="#+id/take_picture"
android:layout_alignParentTop="true"
android:layout_margin="5dp" />
<ImageView
android:id="#+id/take_picture"
android:layout_width="50dp"
android:layout_height="50dp"
android:layout_alignParentBottom="true"
android:layout_centerHorizontal="true"
android:layout_gravity="center"
android:layout_margin="10dp"
android:background="#drawable/flat_selector"
android:padding="5dp"
android:scaleType="fitXY"
android:src="#drawable/take_pic" />
</RelativeLayout>
This is your Android manifest with the required permissions:
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.camtest"
android:versionCode="1"
android:versionName="1.0" >
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-sdk
android:minSdkVersion="8"
android:targetSdkVersion="21" />
<application
android:allowBackup="true"
android:icon="#drawable/ic_launcher"
android:label="#string/app_name"
android:theme="#style/AppTheme" >
<activity
android:name="com.serveroverload.cube.ui.HomeActivity"
android:label="#string/app_name" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
This is the camera handler class to resize and rotate images if required; currently it samples the image down to 1024x1280 for the UI:
public class CamerHandler {
private CamerHandler() {
getAllImages();
}
private static CamerHandler camerHandler;
public static CamerHandler GetCamerHandlerInstance() {
if (null == camerHandler) {
camerHandler = new CamerHandler();
}
return camerHandler;
}
private static final String CAM_DIRECTORY = "CamDirectory";
private static final int MAX_HEIGHT = 1024;
private static final int MAX_WIDTH = 1280;
private ArrayList<File> imageURL = new ArrayList<File>();
public ArrayList<File> getImageURL() {
return imageURL;
}
public void setImageURL(ArrayList<File> imageURL) {
this.imageURL = imageURL;
}
public void getAllImages() {
imageURL.clear();
File folder = new File(getImageDirectory());
File[] listOfFiles = folder.listFiles();
if (null != listOfFiles && listOfFiles.length != 0) {
for (int i = 0; i < listOfFiles.length; i++) {
if (listOfFiles[i].isFile()) {
imageURL.add(listOfFiles[i]);
System.out.println("File " + listOfFiles[i].getName());
} else if (listOfFiles[i].isDirectory()) {
System.out.println("Directory " + listOfFiles[i].getName());
}
}
}
}
/**
* This method is responsible for solving the rotation issue if it exists. It also
* scales the image to 1024x1024 resolution
*
* @param context
* The current context
* @param selectedImage
* The Image URI
* @return Bitmap image results
* @throws IOException
*/
public Bitmap handleSamplingAndRotationBitmap(Context context, Uri selectedImage) throws IOException {
// First decode with inJustDecodeBounds=true to check dimensions
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
InputStream imageStream = context.getContentResolver().openInputStream(selectedImage);
BitmapFactory.decodeStream(imageStream, null, options);
imageStream.close();
// Calculate inSampleSize
options.inSampleSize = calculateInSampleSize(options, MAX_WIDTH, MAX_HEIGHT);
// Decode bitmap with inSampleSize set
options.inJustDecodeBounds = false;
imageStream = context.getContentResolver().openInputStream(selectedImage);
Bitmap img = BitmapFactory.decodeStream(imageStream, null, options);
// img = rotateImageIfRequired(img, selectedImage);
img = rotateBitmap(context, img, selectedImage);
return img;
}
public Bitmap rotateBitmap(Context context, Bitmap bitmap, Uri selectedImage) {
ExifInterface exif;
try {
exif = new ExifInterface(selectedImage.getPath());
int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_UNDEFINED);
Matrix matrix = new Matrix();
switch (orientation) {
case ExifInterface.ORIENTATION_NORMAL:
return bitmap;
case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
matrix.setScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_180:
matrix.setRotate(180);
break;
case ExifInterface.ORIENTATION_FLIP_VERTICAL:
matrix.setRotate(180);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_TRANSPOSE:
matrix.setRotate(90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_90:
matrix.setRotate(90);
break;
case ExifInterface.ORIENTATION_TRANSVERSE:
matrix.setRotate(-90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_270:
matrix.setRotate(-90);
break;
default:
return bitmap;
}
Bitmap bmRotated = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
bitmap.recycle();
return bmRotated;
} catch (IOException e) {
e.printStackTrace();
return null;
} catch (OutOfMemoryError e) {
e.printStackTrace();
return null;
}
}
/**
* Calculate an inSampleSize for use in a {@link BitmapFactory.Options}
* object when decoding bitmaps using the decode* methods from
* {@link BitmapFactory}. This implementation calculates the closest
* inSampleSize that will result in the final decoded bitmap having a width
* and height equal to or larger than the requested width and height. This
* implementation does not ensure a power of 2 is returned for inSampleSize,
* which can be faster when decoding but results in a larger bitmap, which
* isn't as useful for caching purposes.
*
* @param options
* An options object with out* params already populated (run
* through a decode* method with inJustDecodeBounds == true)
* @param reqWidth
* The requested width of the resulting bitmap
* @param reqHeight
* The requested height of the resulting bitmap
* @return The value to be used for inSampleSize
*/
private int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) {
// Raw height and width of image
final int height = options.outHeight;
final int width = options.outWidth;
int inSampleSize = 1;
if (height > reqHeight || width > reqWidth) {
// Calculate ratios of height and width to requested height and
// width
final int heightRatio = Math.round((float) height / (float) reqHeight);
final int widthRatio = Math.round((float) width / (float) reqWidth);
// Choose the smallest ratio as inSampleSize value, this will
// guarantee a final image
// with both dimensions larger than or equal to the requested height
// and width.
inSampleSize = heightRatio < widthRatio ? heightRatio : widthRatio;
// This offers some additional logic in case the image has a strange
// aspect ratio. For example, a panorama may have a much larger
// width than height. In these cases the total pixels might still
// end up being too large to fit comfortably in memory, so we should
// be more aggressive with sample down the image (=larger
// inSampleSize).
final float totalPixels = width * height;
// Anything more than 2x the requested pixels we'll sample down
// further
final float totalReqPixelsCap = reqWidth * reqHeight * 2;
while (totalPixels / (inSampleSize * inSampleSize) > totalReqPixelsCap) {
inSampleSize++;
}
}
return inSampleSize;
}
public void openGallery(Context context) {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse("content://media/internal/images/media"));
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(intent);
}
private void convertToBase64(Bitmap bitmap) {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
bitmap.compress(CompressFormat.JPEG, 30, byteArrayOutputStream);
byte[] byteArray = byteArrayOutputStream.toByteArray();
String encoded = Base64.encodeToString(byteArray, Base64.NO_WRAP);
// bitmap.recycle();
encoded = null;
byteArray = null;
}
public String getImageDirectory() {
return createDirIfNotExists().getAbsolutePath();
}
public File createDirIfNotExists() {
File imageDirectory = new File(Environment.getExternalStorageDirectory(), CAM_DIRECTORY);
if (!imageDirectory.exists()) {
if (!imageDirectory.mkdirs()) {
Log.e("imageDirectory:: ", "Problem creating Image folder");
}
}
return imageDirectory;
}
}
And this is the activity code, which takes pictures and performs all the down-sampling and image rotation in the background before updating the UI:
public class HomeActivity extends Activity {
static final int REQUEST_IMAGE_CAPTURE = 1;
private ImageView previewLayout;
private static final String TAG = "MainActivity";
static Uri capturedImageUri = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_home);
// Imageview to display Image
previewLayout = (ImageView) findViewById(R.id.preview);
findViewById(R.id.take_picture).setOnClickListener(
new OnClickListener() {
@Override
public void onClick(View v) {
dispatchTakePictureIntent();
}
});
}
private void dispatchTakePictureIntent() {
Intent intent = new Intent("android.media.action.IMAGE_CAPTURE");
if (intent.resolveActivity(getPackageManager()) != null) {
Calendar cal = Calendar.getInstance();
// store image in new File in image directory
File file = new File(CamerHandler.GetCamerHandlerInstance()
.getImageDirectory(), (cal.getTimeInMillis() + ".png"));
if (!file.exists()) {
try {
file.createNewFile();
} catch (IOException e) {
e.printStackTrace();
Toast.makeText(getApplicationContext(),
"Failed to make file", Toast.LENGTH_SHORT).show();
}
} else {
file.delete();
try {
file.createNewFile();
} catch (IOException e) {
e.printStackTrace();
Toast.makeText(getApplicationContext(),
"Failed to make file", Toast.LENGTH_SHORT).show();
}
}
capturedImageUri = Uri.fromFile(file);
intent.putExtra(MediaStore.EXTRA_OUTPUT, capturedImageUri);
startActivityForResult(intent, REQUEST_IMAGE_CAPTURE);
}
}
@SuppressLint("NewApi")
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == RESULT_OK && requestCode == REQUEST_IMAGE_CAPTURE) {
// update file in gallery
sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE,
capturedImageUri));
// Downsample image before displaying in the ImageView to avoid an OOM
// exception
new LoadBitMap(previewLayout, HomeActivity.this)
.execute(capturedImageUri);
} else {
Log.e(TAG, "FAILED TO TAKE IMAGE");
}
}
}
class LoadBitMap extends AsyncTask<Uri, Void, Void> {
public LoadBitMap(ImageView preview, Context context) {
this.prevImageView = preview;
this.mContext = context;
}
private Bitmap bitmap = null;
private ImageView prevImageView;
private Context mContext;
@Override
protected Void doInBackground(Uri... params) {
try {
bitmap = CamerHandler.GetCamerHandlerInstance()
.handleSamplingAndRotationBitmap(mContext, params[0]);
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
@Override
protected void onPostExecute(Void result) {
if (null != bitmap) {
prevImageView.setBackground(new BitmapDrawable(mContext
.getResources(), bitmap));
}
super.onPostExecute(result);
}
}
Note: you could also use Picasso or Glide for the image-loading step, since the image URI is already available in the capturedImageUri variable.

Portrait mode in CameraView

I'm developing an Android application using the BeyondAR framework. I'm trying to use a CameraView component in the top half of the screen (the app runs only in portrait mode), but when the camera is rotated 90 degrees the image stretches and the aspect ratio is wrong. Any help?
CameraView class (Beyondar framework)
public class CameraView extends SurfaceView implements SurfaceHolder.Callback,
Camera.PictureCallback {
/**
*
* @author Joan Puig Sanz (joanpuigsanz@gmail.com)
*
*/
public static interface IPictureCallback {
/**
* This method is called when the snapshot of the camera is ready. If
* there is an error, the image will be null
*
* @param picture
*/
void onPictureTaken(Bitmap picture);
}
private SurfaceHolder mHolder;
private Camera mCamera;
private IPictureCallback mCameraCallback;
private BitmapFactory.Options mOptions;
private Size mPreviewSize;
private List<Size> mSupportedPreviewSizes;
private List<String> mSupportedFlashModes;
public CameraView(Context context) {
super(context);
init(context);
}
public CameraView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init(context);
}
public CameraView(Context context, AttributeSet attrs) {
super(context, attrs);
init(context);
}
private void init(Context context) {
mHolder = getHolder();
mHolder.addCallback(this);
try {
mCamera = Camera.open();
//mCamera.setDisplayOrientation(90);
//Camera.Parameters params = mCamera.getParameters();
//params.setPreviewSize(427, 1240);
//mCamera.setParameters(params);
setCamera(mCamera);
} catch (Exception e) {
Log.e(Constants.TAG, "ERROR: Unable to open the camera", e);
}
if (android.os.Build.VERSION.SDK_INT <= 10) {// Android 2.3.x or lower
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
}
public void setCamera(Camera camera) {
mCamera = camera;
if (mCamera != null) {
mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();
mSupportedFlashModes = mCamera.getParameters().getSupportedFlashModes();
// Set the camera to Auto Flash mode.
if (mSupportedFlashModes != null
&& mSupportedFlashModes.contains(Camera.Parameters.FLASH_MODE_AUTO)) {
Camera.Parameters parameters = mCamera.getParameters();
parameters.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO);
//parameters.setPreviewSize(300, 200);
mCamera.setParameters(parameters);
}
}
}
public void setSupportedPreviewSizes(List<Size> supportedPreviewSizes) {
mSupportedPreviewSizes = supportedPreviewSizes;
}
public Size getPreviewSize() {
return mPreviewSize;
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
try {
if (mCamera == null) {
init(getContext());
if (mCamera == null) {
return;
}
}
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
if (mCamera != null) {
mCamera.release();
}
mCamera = null;
Log.e(Constants.TAG, "CameraView -- ERROR en SurfaceCreated", exception);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
// Because the CameraDevice object is not a shared resource, it's very
// important to release it when the activity is paused.
if (mCamera == null) {
return;
}
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if (mSupportedPreviewSizes != null) {
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
}
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
private Size getOptimalPreviewSize(List<Size> sizes, int width, int height) {
Size result = null;
for (Camera.Size size : sizes) {
if (size.width <= width && size.height <= height) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea > resultArea) {
result = size;
}
}
}
}
return result;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if (mCamera == null || getPreviewSize() == null) {
return;
}
Camera.Parameters parameters = mCamera.getParameters();
Size previewSize = getPreviewSize();
parameters.setPreviewSize(previewSize.width, previewSize.height);
mCamera.setParameters(parameters);
previewCamera();
}
@Override
public void onPictureTaken(byte[] imageData, Camera camera) {
if (imageData != null && mCameraCallback != null) {
mCameraCallback.onPictureTaken(StoreByteImage(imageData));
}
previewCamera();
}
public void previewCamera() {
if (mCamera == null){
return;
}
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e) {
Log.d(Constants.TAG, "Cannot start preview.", e);
}
}
private Bitmap StoreByteImage(byte[] imageData) {
Bitmap myImage = DebugBitmap.decodeByteArray(imageData, 0, imageData.length, mOptions);
imageData = null;
System.gc();
return myImage;
}
public void tackePicture(IPictureCallback cameraCallback) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 4;
tackePicture(cameraCallback, options);
}
public void tackePicture(IPictureCallback cameraCallback, BitmapFactory.Options options) {
if (mCamera == null) {
return;
}
mCameraCallback = cameraCallback;
mCamera.takePicture(null, this, this);
mOptions = options;
}
}
Edit
My layout XML file:
<LinearLayout
android:layout_width="match_parent"
android:layout_height="0dip"
android:layout_weight="1"
android:orientation="horizontal"
android:paddingBottom="@dimen/padding"
android:paddingLeft="@dimen/padding"
android:paddingRight="@dimen/padding"
android:paddingTop="@dimen/padding" >
<FrameLayout
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent"
>
<com.beyondar.android.opengl.views.BeyondarGLSurfaceView
android:id="#+id/customGLSurface"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<com.beyondar.android.views.CameraView
android:id="#+id/camera"
android:layout_width="fill_parent"
android:layout_height="fill_parent" />
<TextView
android:id="#+id/labelText"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Touch an AR Object"
android:background="#000000"
android:textColor="#FFFFFF"
/>
</FrameLayout>
</LinearLayout>
If you look at the example application for BeyondAR, you can see that it has a similar problem: the camera image gets stretched to fill the screen, so the aspect ratio is wrong in both landscape and portrait. This is a common problem when working with the camera preview, since the preview locks to a particular orientation when you first start it.
To fix it you need two things: resize the view on rotation to an aspect ratio that matches the device's camera, and tell the camera how the display is rotated. Here's the official Android guide; notice in particular the section called "Set the Preview Orientation".
BeyondAR has since been updated; it is now much easier thanks to fragments. Check the web page and update your library:
https://github.com/BeyondAR/beyondar
