I want to implement a zoom function while recording video on Android, but I am not able to access the Camera parameters or the Camera startSmoothZoom() method from either the main activity or the surface class. If I access the camera parameters inside the MediaRecorder setup method (prepareMediaRecorder()), it throws a NullPointerException.
This is the activity class. In prepareMediaRecorder() I am not able to access the camera parameters or set startSmoothZoom(); the camera object throws a NullPointerException there.
public class CustomCameraPreview extends BaseActivity implements
OnClickListener, AlertPositiveListener, OrientationListener,
ActivityCompat.OnRequestPermissionsResultCallback {
RelativeLayout mLayout;
MediaRecorder mediaRecorder;
private PictureCallback mPictureCallback = new PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
try {
cameraData = data;
captureAngle = getRotation();
mBitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
}
catch (OutOfMemoryError e){
System.gc();
mBitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
}
// int rotation=getRotation();
Matrix matrix = new Matrix();
matrix.postRotate(getRotation());
/*mBitmap = Bitmap.createBitmap(mBitmap, 0, 0, mBitmap.getWidth(),
mBitmap.getHeight(), matrix, true);*/
mBitmap = Bitmap.createBitmap(mBitmap, 0, 0, mBitmap.getWidth(),
mBitmap.getHeight(), matrix, false);
if (mBitmap != null) {
mButtonRetake.setEnabled(true);
} else {
Message.displayToast(CustomCameraPreview.this,
getString(R.string.picture_not_taken));
}
mCamera.release();
mButtonCapture.setEnabled(false);
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
initiCameraForVideo();
}
private void initiCameraForVideo() {
params = mCamera.getParameters();
mButtonCapture.setBackgroundResource(R.drawable.videostart);
mShowCamera = new CameraSurfaceHolder(CustomCameraPreview.this,
mCamera);
mLayout = (RelativeLayout) findViewById(R.id.relativeLayout);
mLayout.removeAllViews();
mLayout.addView(mShowCamera);
List<Camera.Size> mSizeList_Video = null;// params.getSupportedPreviewSizes();
if (params.getSupportedVideoSizes() != null) {
mSizeList_Video = params.getSupportedVideoSizes();
} else {
// Video sizes may be null, which indicates that all the
// supported preview sizes are supported for video
// recording.
mSizeList_Video = mCamera.getParameters()
.getSupportedPreviewSizes();
}
}
@Override
public void onClick(View v) {
int viewId = v.getId();
switch (viewId) {
case R.id.button_Capture:
releaseCamera();
if (!prepareMediaRecorder()) {
Message.displayToast(
CustomCameraPreview.this,
getString(R.string.somethign_went_wrong));
} else {
mediaRecorder.start();
recording = true;
}
break;
}
}
// Method to set up the MediaRecorder for video recording
private boolean prepareMediaRecorder()
{
mCamera = isCameraAvailable();
mediaRecorder = new MediaRecorder();
mCamera.unlock();
mediaRecorder.setCamera(mCamera);
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
if(CamcorderProfile.hasProfile(findCameraID(), CamcorderProfile.QUALITY_480P)) {
mediaRecorder.setProfile(CamcorderProfile
.get(CamcorderProfile.QUALITY_480P));
}else{
mediaRecorder.setProfile(CamcorderProfile
.get(CamcorderProfile.QUALITY_LOW));
}
mediaRecorder.setOutputFile(getOutputVideoFile());
mediaRecorder.setMaxDuration(60000);
// mediaRecorder.setMaxFileSize(100 * 1000 * 1000);
mediaRecorder.setPreviewDisplay(mShowCamera.getHolder().getSurface());
try {
mediaRecorder.prepare();
} catch (IllegalStateException | IOException e) {
releaseMediaRecorder();
return false;
}
return true;
}
@Override
protected void onPause() {
super.onPause();
releaseMediaRecorder();
releaseCamera();
}
private void releaseCamera() {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
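For reference, the Camera documentation notes that since API level 14 the camera is automatically re-locked inside MediaRecorder.start(), so zoom can be applied after recording has started. Below is a minimal sketch of that approach; it is an assumption based on the documented behavior, not code from this project, and it reuses the mCamera and recording fields above:

// Hedged sketch: adjust zoom while recording. Relies on the documented
// API 14+ behavior that MediaRecorder.start() re-locks the camera, so
// Camera.getParameters()/setParameters() work again after start().
private void zoomWhileRecording(int step) {
    if (mCamera == null || !recording) return; // no camera or not recording yet
    Camera.Parameters params = mCamera.getParameters();
    if (!params.isZoomSupported()) return;
    int zoom = Math.max(0, Math.min(params.getMaxZoom(), params.getZoom() + step));
    if (params.isSmoothZoomSupported()) {
        mCamera.startSmoothZoom(zoom);   // animated zoom
    } else {
        params.setZoom(zoom);
        mCamera.setParameters(params);   // immediate zoom
    }
}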
And this is the surface preview class:
public class CameraSurfaceHolder extends SurfaceView implements
SurfaceHolder.Callback {
private static final String TAG = "CameraSurfaceHolder";
Context mContext;
private String errorMessage = "";
private SurfaceHolder mSurfaceHolder;
private Camera mCamera;
public CameraSurfaceHolder(Context context, Camera camera) {
super(context);
mContext = context;
mCamera = camera;
mSurfaceHolder = getHolder();
mSurfaceHolder.addCallback(this);
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
/*if (holder.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
setCameraDisplayOrientation((Activity)mContext, Camera.CameraInfo.CAMERA_FACING_BACK, mCamera);
// start preview with new settings
try {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
}
*/
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
mCamera.setPreviewDisplay(mSurfaceHolder);
mCamera.startPreview();
} catch (Exception e) {
Logger.ex(e);
}
}
public void setCameraDisplayOrientation(Activity activity, int cameraId,
Camera camera) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
/*Display mDisplay = ((WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
int rotation = mDisplay.getDisplayId();*/
int rotation = activity.getWindowManager().getDefaultDisplay()
.getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
Camera.Parameters mParameters = camera.getParameters();
// Note: setRotation() expects degrees (0/90/180/270), not a Surface.ROTATION_* constant
mParameters.setRotation(result);
camera.setDisplayOrientation(result);
camera.setParameters(mParameters);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
}
In the class above I added the code below, and when the user touches the camera preview it throws a NullPointerException in onTouchEvent() when accessing the camera parameters. I also tried setting the camera object on the surface again from the activity after configuring the media recorder (prepareMediaRecorder()); then the zoom function works, but the video is not recorded.
@Override
public boolean onTouchEvent(MotionEvent event) {
// Get the pointer ID
Camera.Parameters params = mCamera.getParameters();
int action = event.getAction();
if (event.getPointerCount() > 1) {
// handle multi-touch events
if (action == MotionEvent.ACTION_POINTER_DOWN) {
mDist = getFingerSpacing(event);
} else if (action == MotionEvent.ACTION_MOVE && params.isZoomSupported()) {
mCamera.cancelAutoFocus();
handleZoom(event, params);
}
} else {
// handle single touch events
if (action == MotionEvent.ACTION_UP) {
handleFocus(event, params);
}
}
return true;
}
private void handleZoom(MotionEvent event, Camera.Parameters params) {
int maxZoom = params.getMaxZoom();
int zoom = params.getZoom();
float newDist = getFingerSpacing(event);
if (newDist > mDist) {
//zoom in
if (zoom < maxZoom)
zoom++;
} else if (newDist < mDist) {
//zoom out
if (zoom > 0)
zoom--;
}
mDist = newDist;
params.setZoom(zoom);
mCamera.setParameters(params);
}
public void handleFocus(MotionEvent event, Camera.Parameters params) {
int pointerId = event.getPointerId(0);
int pointerIndex = event.findPointerIndex(pointerId);
// Get the pointer's current position
float x = event.getX(pointerIndex);
float y = event.getY(pointerIndex);
List<String> supportedFocusModes = params.getSupportedFocusModes();
if (supportedFocusModes != null && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
mCamera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean b, Camera camera) {
// currently set to auto-focus on single touch
}
});
}
}
/** Determine the space between the first two fingers */
private float getFingerSpacing(MotionEvent event) {
// ...
float x = event.getX(0) - event.getX(1);
float y = event.getY(0) - event.getY(1);
return (float)Math.sqrt(x * x + y * y);
}
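One way to avoid the crash is to null-guard the touch handler and to hand the re-opened camera back to the existing preview instead of re-adding the view (re-adding restarts the preview, which is likely why the recording stops). A hedged sketch; setCamera() is a hypothetical setter, not part of the original class:

// Hypothetical setter: lets the activity update the camera reference after
// prepareMediaRecorder() without recreating the SurfaceView.
public void setCamera(Camera camera) {
    mCamera = camera;
}

@Override
public boolean onTouchEvent(MotionEvent event) {
    if (mCamera == null) return false; // camera released or not yet reattached
    try {
        Camera.Parameters params = mCamera.getParameters();
        if (event.getPointerCount() > 1 && params.isZoomSupported()) {
            handleZoom(event, params); // existing zoom logic above
        }
    } catch (RuntimeException e) {
        // getParameters() can throw while the camera is held by MediaRecorder
    }
    return true;
}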
I have a piece of code that compares two images and places a marker on the difference. So far it works well, except that the most recently added marker always shows underneath all the older markers. The latest marker is yellow and the older ones are red; when the difference is close to one of the red markers, the yellow marker is drawn behind them.
Can anyone help me get the yellow (latest) marker to appear on top?
This is my code so far:
public class CheckmarkActivity extends AppCompatActivity implements ZoomLayout.OnZoomableLayoutClickEventListener {
TextView tv;
RelativeLayout relativeLayout_work;
ImageView imageViewtest;
Bitmap prevBmp = null;
Timer t;
TimerTask task;
int time = 100;
float image_Width;
float image_Height;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_checkmark);
if (getResources().getBoolean(R.bool.is_tablet)) {
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
}
tv = findViewById(R.id.tv);
relativeLayout_work = findViewById(R.id.relativeLayout_work);
imageViewtest = findViewById(R.id.imageViewtest);
prevBmp = ViewcontrollerActivity.workSession.getLastScreenShot();
if (prevBmp == null || ViewcontrollerActivity.workSession.workScreenShot == null) {
setResult(Activity.RESULT_CANCELED);
finish();
}
startTimer();
}
// image compare
class TestAsync extends AsyncTask<Object, Integer, String>
{
String TAG = getClass().getSimpleName();
PointF p;
Bitmap test_3;
protected void onPreExecute (){
super.onPreExecute();
Log.d(TAG + " PreExceute","On pre Exceute......");
}
protected String doInBackground(Object...arg0) {
test_3 = ImageHelper.findDifference(CheckmarkActivity.this, prevBmp, ViewcontrollerActivity.workSession.workScreenShot);
p = ImageHelper.findShot(test_3);
time = 1;
return "You are at PostExecute";
}
protected void onProgressUpdate(Integer...a){
super.onProgressUpdate(a);
}
protected void onPostExecute(String result) {
super.onPostExecute(result);
addImageToImageview();
PointF np = Session.convertPointBitmap2View(p, relativeLayout_work, ViewcontrollerActivity.workSession.workScreenShot);
tv.setX(np.x - tv.getWidth() / 2);
tv.setY(np.y - tv.getHeight() / 2);
tv.setVisibility(View.VISIBLE);
// imageViewtest.setImageBitmap(test_3);
}
}
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i("OpenCV", "OpenCV loaded successfully");
new TestAsync().execute();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
@Override
protected void onResume() {
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d("OpenCV", "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d("OpenCV", "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public static int[] getBitmapOffset(ImageView img, Boolean includeLayout) {
int[] offset = new int[2];
float[] values = new float[9];
Matrix m = img.getImageMatrix();
m.getValues(values);
offset[0] = (int) values[5];
offset[1] = (int) values[2];
if (includeLayout) {
ViewGroup.MarginLayoutParams lp = (ViewGroup.MarginLayoutParams) img.getLayoutParams();
int paddingTop = (int) (img.getPaddingTop() );
int paddingLeft = (int) (img.getPaddingLeft() );
offset[0] += paddingTop + lp.topMargin;
offset[1] += paddingLeft + lp.leftMargin;
}
return offset;
}
public static int[] getBitmapPositionInsideImageView(ImageView imageView) {
int[] ret = new int[4];
if (imageView == null || imageView.getDrawable() == null)
return ret;
// Get image dimensions
// Get image matrix values and place them in an array
float[] f = new float[9];
imageView.getImageMatrix().getValues(f);
// Extract the scale values using the constants (if aspect ratio maintained, scaleX == scaleY)
final float scaleX = f[Matrix.MSCALE_X];
final float scaleY = f[Matrix.MSCALE_Y];
// Get the drawable (could also get the bitmap behind the drawable and getWidth/getHeight)
final Drawable d = imageView.getDrawable();
final int origW = d.getIntrinsicWidth();
final int origH = d.getIntrinsicHeight();
// Calculate the actual dimensions
final int actW = Math.round(origW * scaleX);
final int actH = Math.round(origH * scaleY);
ret[2] = actW;
ret[3] = actH;
// Get image position
// We assume that the image is centered into ImageView
int imgViewW = imageView.getWidth();
int imgViewH = imageView.getHeight();
int top = (int) (imgViewH - actH)/2;
int left = (int) (imgViewW - actW)/2;
ret[0] = left;
ret[1] = top;
return ret;
}
private void addImageToImageview(){
if (ViewcontrollerActivity.workSession.workScreenShot != null) {
imageViewtest.setImageBitmap(ViewcontrollerActivity.workSession.workScreenShot);
Log.d("width", String.valueOf(imageViewtest.getWidth()));
}
Resources r = getResources();
float px = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 20, r.getDisplayMetrics());
for (int i = 0; i < ViewcontrollerActivity.workSession.getShotCount(); i++) {
PointF p = ViewcontrollerActivity.workSession.getPoint(i);
TextView t = new TextView(this);
t.setText("" + (i + 1));
RelativeLayout.LayoutParams param = new RelativeLayout.LayoutParams((int)px, (int)px);
relativeLayout_work.addView(t);
t.setLayoutParams(param);
t.setGravity(Gravity.CENTER);
t.setBackgroundResource(R.drawable.circle);
p = Session.convertPointBitmap2View(p, relativeLayout_work, ViewcontrollerActivity.workSession.workScreenShot);
t.setX(p.x);
t.setY(p.y);
t.setTag(10000 + i);
}
}
public void onConfirm(View v){
View vv = findViewById(R.id.relativeLayout_work);
PointF bp = Session.convertPointView2Bitmap(new PointF(tv.getX(), tv.getY()), relativeLayout_work, ViewcontrollerActivity.workSession.workScreenShot);
ViewcontrollerActivity.workSession.addNewShot(ViewcontrollerActivity.workSession.workScreenShot, bp);
setResult(Activity.RESULT_OK);
finish();
}
public void onCancel(View v){
setResult(Activity.RESULT_CANCELED);
finish();
}
@Override
public void onBackPressed() {
setResult(Activity.RESULT_CANCELED);
finish();
}
@Override
public void OnContentClickEvent(int action, float xR, float yR) {
int[] offset = new int[2];
int[] rect = new int[4];
offset = this.getBitmapOffset(imageViewtest, false);
int original_width = imageViewtest.getDrawable().getIntrinsicWidth();
int original_height = imageViewtest.getDrawable().getIntrinsicHeight();
rect = getBitmapPositionInsideImageView(imageViewtest);
Log.i("OffsetY", String.valueOf(offset[0]));
Log.i("OffsetX", String.valueOf(offset[1]));
Log.i( "0", String.valueOf(rect[0]));
Log.i( "1", String.valueOf(rect[1]));
Log.i( "2", String.valueOf(rect[2]));
Log.i( "3", String.valueOf(rect[3]));
if (xR > rect[0] && xR < rect[0] + rect[2] && yR > rect[1] && yR < rect[1] + rect[3]) {
tv.setX(xR - tv.getWidth() / 2);
tv.setY(yR - tv.getHeight() / 2);
}
// tv.setX(xR - tv.getWidth() / 2);
// tv.setY(yR - tv.getHeight() / 2);
}
public void onMoveButtonPressed(View v) {
ImageButton b = (ImageButton)v;
int mId = b.getId();
switch (mId) {
case R.id.imageButtonL:
tv.setX(tv.getX() - 1);
break;
case R.id.imageButtonR:
tv.setX(tv.getX() + 1);
break;
case R.id.imageButtonD:
tv.setY(tv.getY() + 1);
break;
case R.id.imageButtonU:
tv.setY(tv.getY() - 1);
break;
}
}
//timer change image
public void startTimer(){
t = new Timer();
task = new TimerTask() {
@Override
public void run() {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (time == 1){
imageViewtest.setImageBitmap(ViewcontrollerActivity.workSession.workScreenShot);
// tv.setVisibility(View.VISIBLE);
tv.setText("" + (ViewcontrollerActivity.workSession.getShotCount() + 1));
t.cancel();
return;
}
if (time % 2 == 0) {
imageViewtest.setImageBitmap(prevBmp);
}
else if(time % 2 == 1){
imageViewtest.setImageBitmap(ViewcontrollerActivity.workSession.workScreenShot);
}
time --;
}
});
}
};
t.scheduleAtFixedRate(task, 0, 500);
}
}
You can control the z-order of a child view with the addView() overload that takes an index:
void addView(View child, int index)
For example:
private void addImageToImageview(){
if (ViewcontrollerActivity.workSession.workScreenShot != null) {
imageViewtest.setImageBitmap(ViewcontrollerActivity.workSession.workScreenShot);
Log.d("width", String.valueOf(imageViewtest.getWidth()));
}
Resources r = getResources();
float px = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 20, r.getDisplayMetrics());
int currChildrenCount = relativeLayout_work.getChildCount();
for (int i = 0; i < ViewcontrollerActivity.workSession.getShotCount(); i++) {
PointF p = ViewcontrollerActivity.workSession.getPoint(i);
TextView t = new TextView(this);
t.setText("" + (i + 1));
RelativeLayout.LayoutParams param = new RelativeLayout.LayoutParams((int)px, (int)px);
relativeLayout_work.addView(t, currChildrenCount+i); // You can control the order like this
t.setLayoutParams(param);
t.setGravity(Gravity.CENTER);
t.setBackgroundResource(R.drawable.circle);
p = Session.convertPointBitmap2View(p, relativeLayout_work, ViewcontrollerActivity.workSession.workScreenShot);
t.setX(p.x);
t.setY(p.y);
t.setTag(10000 + i);
}
}
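Alternatively (a hedged suggestion, not part of the original answer): since later children of a RelativeLayout are drawn on top, you can simply raise the yellow marker after all the red markers have been added:

// Bring the yellow "latest" marker (tv) above its siblings after adding the others.
tv.bringToFront();
relativeLayout_work.requestLayout();   // re-run layout with the new child order
relativeLayout_work.invalidate();      // redraw

// On API 21+, elevation also controls draw order, regardless of child index:
tv.setElevation(1f);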
I'm making a motion detector app on top of this project. I would like to display a watermark image on the camera preview.
I tried one method, but it didn't work for me. Please explain in code how to show the watermark without breaking the motion detection part.
MotionDetection.java
public class MotionDetectionActivity extends SensorsActivity {
private static final String TAG = "MotionDetectionActivity";
private static SurfaceView preview = null;
private static SurfaceHolder previewHolder = null;
private static Camera camera = null;
private static boolean inPreview = false;
private static long mReferenceTime = 0;
private static IMotionDetection detector = null;
private static volatile AtomicBoolean processing = new AtomicBoolean(false);
/**
* {@inheritDoc}
*/
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
preview = (SurfaceView) findViewById(R.id.preview);
previewHolder = preview.getHolder();
previewHolder.addCallback(surfaceCallback);
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
if (Preferences.USE_RGB) {
detector = new RgbMotionDetection();
} else if (Preferences.USE_LUMA) {
detector = new LumaMotionDetection();
} else {
// Using State based (aggregate map)
detector = new AggregateLumaMotionDetection();
}
}
/**
* {@inheritDoc}
*/
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
}
/**
* {@inheritDoc}
*/
@Override
public void onPause() {
super.onPause();
camera.setPreviewCallback(null);
if (inPreview) camera.stopPreview();
inPreview = false;
camera.release();
camera = null;
}
/**
* {@inheritDoc}
*/
@Override
public void onResume() {
super.onResume();
camera = Camera.open();
}
private PreviewCallback previewCallback = new PreviewCallback() {
/**
* {@inheritDoc}
*/
@Override
public void onPreviewFrame(byte[] data, Camera cam) {
if (data == null) return;
Camera.Size size = cam.getParameters().getPreviewSize();
if (size == null) return;
if (!GlobalData.isPhoneInMotion()) {
DetectionThread thread = new DetectionThread(data, size.width, size.height);
thread.start();
}
}
};
private SurfaceHolder.Callback surfaceCallback = new SurfaceHolder.Callback() {
/**
* {@inheritDoc}
*/
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
camera.setPreviewDisplay(previewHolder);
camera.setPreviewCallback(previewCallback);
} catch (Throwable t) {
Log.e("PreviewDemo-surfaceCallback", "Exception in setPreviewDisplay()", t);
}
}
/**
* {@inheritDoc}
*/
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Camera.Parameters parameters = camera.getParameters();
Camera.Size size = getBestPreviewSize(width, height, parameters);
if (size != null) {
parameters.setPreviewSize(size.width, size.height);
Log.d(TAG, "Using width=" + size.width + " height=" + size.height);
}
camera.setParameters(parameters);
camera.startPreview();
inPreview = true;
}
/**
* {@inheritDoc}
*/
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// Ignore
}
};
private static Camera.Size getBestPreviewSize(int width, int height, Camera.Parameters parameters) {
Camera.Size result = null;
for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
if (size.width <= width && size.height <= height) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea > resultArea) result = size;
}
}
}
return result;
}
private static final class DetectionThread extends Thread {
private byte[] data;
private int width;
private int height;
public DetectionThread(byte[] data, int width, int height) {
this.data = data;
this.width = width;
this.height = height;
}
/**
* {@inheritDoc}
*/
@Override
public void run() {
if (!processing.compareAndSet(false, true)) return;
// Log.d(TAG, "BEGIN PROCESSING...");
try {
// Previous frame
int[] pre = null;
if (Preferences.SAVE_PREVIOUS) pre = detector.getPrevious();
// Current frame (with changes)
// long bConversion = System.currentTimeMillis();
int[] img = null;
if (Preferences.USE_RGB) {
img = ImageProcessing.decodeYUV420SPtoRGB(data, width, height);
} else {
img = ImageProcessing.decodeYUV420SPtoLuma(data, width, height);
}
// long aConversion = System.currentTimeMillis();
// Log.d(TAG, "Converstion="+(aConversion-bConversion));
// Current frame (without changes)
int[] org = null;
if (Preferences.SAVE_ORIGINAL && img != null) org = img.clone();
if (img != null && detector.detect(img, width, height)) {
// The delay is necessary to avoid taking a picture while in
// the
// middle of taking another. This problem can causes some
// phones
// to reboot.
long now = System.currentTimeMillis();
if (now > (mReferenceTime + Preferences.PICTURE_DELAY)) {
mReferenceTime = now;
Bitmap previous = null;
if (Preferences.SAVE_PREVIOUS && pre != null) {
if (Preferences.USE_RGB) previous = ImageProcessing.rgbToBitmap(pre, width, height);
else previous = ImageProcessing.lumaToGreyscale(pre, width, height);
}
Bitmap original = null;
if (Preferences.SAVE_ORIGINAL && org != null) {
if (Preferences.USE_RGB) original = ImageProcessing.rgbToBitmap(org, width, height);
else original = ImageProcessing.lumaToGreyscale(org, width, height);
}
Bitmap bitmap = null;
if (Preferences.SAVE_CHANGES) {
if (Preferences.USE_RGB) bitmap = ImageProcessing.rgbToBitmap(img, width, height);
else bitmap = ImageProcessing.lumaToGreyscale(img, width, height);
}
Log.i(TAG, "Saving.. previous=" + previous + " original=" + original + " bitmap=" + bitmap);
Looper.prepare();
new SavePhotoTask().execute(previous, original, bitmap);
} else {
Log.i(TAG, "Not taking picture because not enough time has passed since the creation of the Surface");
}
}
} catch (Exception e) {
e.printStackTrace();
} finally {
processing.set(false);
}
// Log.d(TAG, "END PROCESSING...");
processing.set(false);
}
};
private static final class SavePhotoTask extends AsyncTask<Bitmap, Integer, Integer> {
/**
* {@inheritDoc}
*/
@Override
protected Integer doInBackground(Bitmap... data) {
for (int i = 0; i < data.length; i++) {
Bitmap bitmap = data[i];
String name = String.valueOf(System.currentTimeMillis());
if (bitmap != null) save(name, bitmap);
}
return 1;
}
private void save(String name, Bitmap bitmap) {
File photo = new File(Environment.getExternalStorageDirectory(), name + ".jpg");
if (photo.exists()) photo.delete();
try {
FileOutputStream fos = new FileOutputStream(photo.getPath());
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
fos.close();
} catch (java.io.IOException e) {
Log.e("PictureDemo", "Exception in photoCallback", e);
}
}
}
}
main.xml
<?xml version="1.0" encoding="utf-8"?>
<SurfaceView xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/preview"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
</SurfaceView>
A workaround is to overlay the Activity's layout with another XML file that contains the watermark image. To do so:
Create a new layout file inside the layout folder, e.g.:
overlay.xml
Insert an ImageView inside it, something like:
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
xmlns:app="http://schemas.android.com/apk/res-auto">
<ImageView
android:id="#+id/imageView1"
android:layout_centerInParent="true"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:src="#drawable/android" />
</RelativeLayout>
Then, inside the Activity Java file (i.e. the MotionDetector.java file), create a new method addView():
private void addView()
{
LayoutInflater controlInflater = LayoutInflater.from(getBaseContext());
View viewControl = controlInflater.inflate(R.layout.overlay, null);
ViewGroup.LayoutParams layoutParamsControl = new ViewGroup.LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
this.addContentView(viewControl, layoutParamsControl);
}
Finally, invoke the addView() method from onCreate() to add the image:
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
preview = (SurfaceView) findViewById(R.id.preview);
previewHolder = preview.getHolder();
previewHolder.addCallback(surfaceCallback);
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
addView();
The end result is an image overlaid on top of the SurfaceView. Depending on the quality of the watermark image, the rendered watermark should look original. Hope it helps.
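An equivalent approach (a hedged alternative, not part of the original answer) is to add the watermark programmatically from onCreate(), right after setContentView(R.layout.main), with no second layout file; the preview callbacks are unaffected because the ImageView is simply drawn on top:

// Hedged sketch: programmatic watermark overlay, added after setContentView().
// R.drawable.android is the same placeholder drawable used above.
ImageView watermark = new ImageView(this);
watermark.setImageResource(R.drawable.android);
FrameLayout.LayoutParams lp = new FrameLayout.LayoutParams(
        ViewGroup.LayoutParams.WRAP_CONTENT,
        ViewGroup.LayoutParams.WRAP_CONTENT,
        Gravity.CENTER);
addContentView(watermark, lp);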
This is my main activity; it is the code for a camera filter. I couldn't figure out any error, but when I run this code it crashes on my real device. Can anyone help me with where the error is and what it belongs to? I am attaching the logcat error below; please see it.
public class MainActivity extends AppCompatActivity {
private static final int REQUEST_CAMERA_PERSIMMISON = 101;
private CameraRenderer renderer;
private TextureView textureView;
private int filterId = R.id.filter0;
/**
* ATTENTION: This was auto-generated to implement the App Indexing API.
* See https://g.co/AppIndexing/AndroidStudio for more information.
*/
private GoogleApiClient client;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setTitle("Original");
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager
.PERMISSION_GRANTED) {
if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) {
Toast.makeText(this, "Camera acess is required.", Toast.LENGTH_SHORT).show();
} else {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERSIMMISON);
}
} else {
setupCameraPreviewView();
}
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
client = new GoogleApiClient.Builder(this).addApi(AppIndex.API).build();
}
void setupCameraPreviewView() {
renderer = new CameraRenderer(this);
textureView = (TextureView) findViewById(R.id.textureView);
assert textureView != null;
textureView.setSurfaceTextureListener(renderer);
textureView.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
renderer.setSelectedFilter(R.id.filter0);
break;
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_CANCEL:
renderer.setSelectedFilter(filterId);
break;
}
return true;
}
});
textureView.addOnLayoutChangeListener(new View.OnLayoutChangeListener() {
@Override
public void onLayoutChange(View v, int left, int top, int right, int bottom, int oldLeft, int oldTop, int oldRight, int oldBottom) {
renderer.onSurfaceTextureSizeChanged(null, v.getWidth(), v.getHeight());
}
});
}
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.filter, menu);
return true;
}
public boolean onOptionsItemSelected(MenuItem item) {
filterId = item.getItemId();
if (filterId == R.id.capture) {
Toast.makeText(this, capture() ? "The capture has been saved to your sdcard root path." : "Saved failed", Toast.LENGTH_SHORT).show();
return true;
}
setTitle(item.getTitle());
if (renderer != null)
renderer.setSelectedFilter(filterId);
return true;
}
private boolean capture() {
String mPath = genSaveFileName(getTitle().toString() + "_", ".png");
File imageFile = new File(mPath);
if (imageFile.exists()) {
imageFile.delete();
}
Bitmap bitmap = textureView.getBitmap();
OutputStream outputStream = null;
try {
outputStream = new FileOutputStream(imageFile);
bitmap.compress(Bitmap.CompressFormat.PNG, 90, outputStream);
outputStream.flush();
outputStream.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
return false;
} catch (IOException e) {
e.printStackTrace();
return false;
}
return true;
}
private String genSaveFileName(String prefix, String suffix) {
Date date = new Date();
SimpleDateFormat dateFormat1 = new SimpleDateFormat("yyyyMMdd_hhmmss");
String timeString = dateFormat1.format(date);
String externalPath = Environment.getExternalStorageDirectory().toString();
return externalPath + "/" + prefix + timeString + suffix;
}
@Override
public void onStart() {
super.onStart();
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
client.connect();
Action viewAction = Action.newAction(
Action.TYPE_VIEW, // TODO: choose an action type.
"Main Page", // TODO: Define a title for the content shown.
// TODO: If you have web page content that matches this app activity's content,
// make sure this auto-generated web page URL is correct.
// Otherwise, set the URL to null.
Uri.parse("http://host/path"),
// TODO: Make sure this auto-generated app URL is correct.
Uri.parse("android-app://giri.com.camerafilter/http/host/path")
);
AppIndex.AppIndexApi.start(client, viewAction);
}
@Override
public void onStop() {
super.onStop();
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
Action viewAction = Action.newAction(
Action.TYPE_VIEW, // TODO: choose an action type.
"Main Page", // TODO: Define a title for the content shown.
// TODO: If you have web page content that matches this app activity's content,
// make sure this auto-generated web page URL is correct.
// Otherwise, set the URL to null.
Uri.parse("http://host/path"),
// TODO: Make sure this auto-generated app URL is correct.
Uri.parse("android-app://giri.com.camerafilter/http/host/path")
);
AppIndex.AppIndexApi.end(client, viewAction);
client.disconnect();
}
}
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
>
<TextView
android:id="#+id/textureView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
/>
</RelativeLayout>
This is my CameraRenderer.java:
public class CameraRenderer extends Thread implements TextureView.SurfaceTextureListener {
private static final String TAG = "CameraRenderer";
private static final int EGL_OPENGL_ES2_BIT = 4;
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
private static final int DRAW_INTERVAL = 1000 / 30;
private Context context;
private SurfaceTexture surfaceTexture;
private int gwidth, gheight;
private EGLDisplay eglDisplay;
private EGLSurface eglSurface;
private EGLContext eglContext;
private EGL10 egl10;
private Camera camera;
private SurfaceTexture cameraSurfaceTexture;
private int cameraTextureId;
private CameraFilter selectedFilter;
private SparseArray<CameraFilter> cameraFilterMap = new SparseArray<>();
public CameraRenderer(Context context) {
this.context = context;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
GLES20.glViewport(0, 0, gwidth = width, gheight = height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
if (camera != null) {
camera.stopPreview();
camera.release();
}
interrupt();
CameraFilter.release();
return true;
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
if (isAlive()) {
interrupt();
}
surfaceTexture = surface;
GLES20.glViewport(0, 0, gwidth = width, gheight = height);
// Open camera
Pair<Camera.CameraInfo, Integer> backCamera = getBackCamera();
final int backCameraId = backCamera.second;
camera = Camera.open(backCameraId);
// Start rendering
start();
}
public void setSelectedFilter(int id) {
selectedFilter = cameraFilterMap.get(id);
selectedFilter.onAttach();
}
@Override
public void run() {
initGL(surfaceTexture);
// Setup camera filters map
cameraFilterMap.append(R.id.filter0, new OriginalFilter(context));
cameraFilterMap.append(R.id.filter1, new EdgeDetectionFilter(context));
cameraFilterMap.append(R.id.filter2, new PixelizeFilter(context));
cameraFilterMap.append(R.id.filter3, new EMInterferenceFilter(context));
cameraFilterMap.append(R.id.filter4, new TrianglesMosaicFilter(context));
cameraFilterMap.append(R.id.filter5, new LegofiedFilter(context));
cameraFilterMap.append(R.id.filter6, new TileMosaicFilter(context));
cameraFilterMap.append(R.id.filter7, new BlueorangeFilter(context));
cameraFilterMap.append(R.id.filter8, new ChromaticAberrationFilter(context));
cameraFilterMap.append(R.id.filter9, new BasicDeformFilter(context));
cameraFilterMap.append(R.id.filter10, new ContrastFilter(context));
cameraFilterMap.append(R.id.filter11, new NoiseWarpFilter(context));
cameraFilterMap.append(R.id.filter12, new RefractionFilter(context));
cameraFilterMap.append(R.id.filter13, new MappingFilter(context));
cameraFilterMap.append(R.id.filter14, new CrosshatchFilter(context));
cameraFilterMap.append(R.id.filter15, new LichtensteinEsqueFilter(context));
cameraFilterMap.append(R.id.filter16, new AsciiArtFilter(context));
cameraFilterMap.append(R.id.filter17, new MoneyFilter(context));
cameraFilterMap.append(R.id.filter18, new CrackedFilter(context));
cameraFilterMap.append(R.id.filter19, new PolygonizationFilter(context));
cameraFilterMap.append(R.id.filter20, new JFAVoronoiFilter(context));
setSelectedFilter(R.id.filter0);
// Create texture for camera preview
cameraTextureId = MyGLUtils.genTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
cameraSurfaceTexture = new SurfaceTexture(cameraTextureId);
// Start camera preview
try {
camera.setPreviewTexture(cameraSurfaceTexture);
camera.startPreview();
} catch (IOException ioe) {
// Something bad happened
}
// Render loop
while (!Thread.currentThread().isInterrupted()) {
try {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Update the camera preview texture
synchronized (this) {
cameraSurfaceTexture.updateTexImage();
}
// Draw camera preview
selectedFilter.draw(cameraTextureId, gwidth, gheight);
// Flush
GLES20.glFlush();
egl10.eglSwapBuffers(eglDisplay, eglSurface);
Thread.sleep(DRAW_INTERVAL);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
cameraSurfaceTexture.release();
GLES20.glDeleteTextures(1, new int[]{cameraTextureId}, 0);
}
private void initGL(SurfaceTexture texture) {
egl10 = (EGL10) EGLContext.getEGL();
eglDisplay = egl10.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
throw new RuntimeException("eglGetDisplay failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
}
int[] version = new int[2];
if (!egl10.eglInitialize(eglDisplay, version)) {
throw new RuntimeException("eglInitialize failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
}
int[] configsCount = new int[1];
EGLConfig[] configs = new EGLConfig[1];
int[] configSpec = {
EGL10.EGL_RENDERABLE_TYPE,
EGL_OPENGL_ES2_BIT,
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_ALPHA_SIZE, 8,
EGL10.EGL_DEPTH_SIZE, 0,
EGL10.EGL_STENCIL_SIZE, 0,
EGL10.EGL_NONE
};
EGLConfig eglConfig = null;
if (!egl10.eglChooseConfig(eglDisplay, configSpec, configs, 1, configsCount)) {
throw new IllegalArgumentException("eglChooseConfig failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
} else if (configsCount[0] > 0) {
eglConfig = configs[0];
}
if (eglConfig == null) {
throw new RuntimeException("eglConfig not initialized");
}
int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
eglContext = egl10.eglCreateContext(eglDisplay, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
eglSurface = egl10.eglCreateWindowSurface(eglDisplay, eglConfig, texture, null);
if (eglSurface == null || eglSurface == EGL10.EGL_NO_SURFACE) {
int error = egl10.eglGetError();
if (error == EGL10.EGL_BAD_NATIVE_WINDOW) {
Log.e(TAG, "eglCreateWindowSurface returned EGL10.EGL_BAD_NATIVE_WINDOW");
return;
}
throw new RuntimeException("eglCreateWindowSurface failed " + android.opengl.GLUtils.getEGLErrorString(error));
}
if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
throw new RuntimeException("eglMakeCurrent failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
}
}
private Pair<Camera.CameraInfo, Integer> getBackCamera() {
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
final int numberOfCameras = Camera.getNumberOfCameras();
for (int i = 0; i < numberOfCameras; ++i) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
return new Pair<>(cameraInfo, i);
}
}
return null;
}
}
This is my CameraFilter.java file:
public abstract class CameraFilter {
static final float SQUARE_COORDS[] = {
1.0f, -1.0f,
-1.0f, -1.0f,
1.0f, 1.0f,
-1.0f, 1.0f,
};
static final float TEXTURE_COORDS[] = {
1.0f, 0.0f,
0.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f,
};
static FloatBuffer VERTEX_BUF, TEXTURE_COORD_BUF;
static int PROGRAM = 0;
private static final int BUF_ACTIVE_TEX_UNIT = GLES20.GL_TEXTURE8;
private static RenderBuffer CAMERA_RENDER_BUF;
private static final float ROATED_TEXTURE_COORDS[] = {
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 0.0f,
0.0f, 1.0f,
};
private static FloatBuffer ROATED_TEXTURE_COORD_BUF;
final long START_TIME = System.currentTimeMillis();
int iFrame = 0;
public CameraFilter(Context context) {
// Setup default Buffers
if (VERTEX_BUF == null) {
VERTEX_BUF = ByteBuffer.allocateDirect(SQUARE_COORDS.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
VERTEX_BUF.put(SQUARE_COORDS);
VERTEX_BUF.position(0);
}
if (TEXTURE_COORD_BUF == null) {
TEXTURE_COORD_BUF = ByteBuffer.allocateDirect(TEXTURE_COORDS.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
TEXTURE_COORD_BUF.put(TEXTURE_COORDS);
TEXTURE_COORD_BUF.position(0);
}
if (ROATED_TEXTURE_COORD_BUF == null) {
ROATED_TEXTURE_COORD_BUF = ByteBuffer.allocateDirect(ROATED_TEXTURE_COORDS.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
ROATED_TEXTURE_COORD_BUF.put(ROATED_TEXTURE_COORDS);
ROATED_TEXTURE_COORD_BUF.position(0);
}
if (PROGRAM == 0) {
PROGRAM = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.original_rtt);
}
}
@CallSuper
public void onAttach() {
iFrame = 0;
}
final public void draw(int cameraTexId, int canvasWidth, int canvasHeight) {
// TODO move?
// Create camera render buffer
if (CAMERA_RENDER_BUF == null ||
CAMERA_RENDER_BUF.getWidth() != canvasWidth ||
CAMERA_RENDER_BUF.getHeight() != canvasHeight) {
CAMERA_RENDER_BUF = new RenderBuffer(canvasWidth, canvasHeight, BUF_ACTIVE_TEX_UNIT);
}
// Use shaders
GLES20.glUseProgram(PROGRAM);
int iChannel0Location = GLES20.glGetUniformLocation(PROGRAM, "iChannel0");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexId);
GLES20.glUniform1i(iChannel0Location, 0);
int vPositionLocation = GLES20.glGetAttribLocation(PROGRAM, "vPosition");
GLES20.glEnableVertexAttribArray(vPositionLocation);
GLES20.glVertexAttribPointer(vPositionLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, VERTEX_BUF);
int vTexCoordLocation = GLES20.glGetAttribLocation(PROGRAM, "vTexCoord");
GLES20.glEnableVertexAttribArray(vTexCoordLocation);
GLES20.glVertexAttribPointer(vTexCoordLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, ROATED_TEXTURE_COORD_BUF);
// Render to texture
CAMERA_RENDER_BUF.bind();
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
CAMERA_RENDER_BUF.unbind();
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
onDraw(CAMERA_RENDER_BUF.getTexId(), canvasWidth, canvasHeight);
iFrame ++;
}
abstract void onDraw(int cameraTexId, int canvasWidth, int canvasHeight);
void setupShaderInputs(int program, int[] iResolution, int[] iChannels, int[][] iChannelResolutions) {
setupShaderInputs(program, VERTEX_BUF, TEXTURE_COORD_BUF, iResolution, iChannels, iChannelResolutions);
}
void setupShaderInputs(int program, FloatBuffer vertex, FloatBuffer textureCoord, int[] iResolution, int[] iChannels, int[][] iChannelResolutions) {
GLES20.glUseProgram(program);
int iResolutionLocation = GLES20.glGetUniformLocation(program, "iResolution");
GLES20.glUniform3fv(iResolutionLocation, 1,
FloatBuffer.wrap(new float[]{(float) iResolution[0], (float) iResolution[1], 1.0f}));
float time = ((float) (System.currentTimeMillis() - START_TIME)) / 1000.0f;
int iGlobalTimeLocation = GLES20.glGetUniformLocation(program, "iGlobalTime");
GLES20.glUniform1f(iGlobalTimeLocation, time);
int iFrameLocation = GLES20.glGetUniformLocation(program, "iFrame");
GLES20.glUniform1i(iFrameLocation, iFrame);
int vPositionLocation = GLES20.glGetAttribLocation(program, "vPosition");
GLES20.glEnableVertexAttribArray(vPositionLocation);
GLES20.glVertexAttribPointer(vPositionLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, vertex);
int vTexCoordLocation = GLES20.glGetAttribLocation(program, "vTexCoord");
GLES20.glEnableVertexAttribArray(vTexCoordLocation);
GLES20.glVertexAttribPointer(vTexCoordLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, textureCoord);
for (int i = 0; i < iChannels.length; i ++) {
int sTextureLocation = GLES20.glGetUniformLocation(program, "iChannel" + i);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, iChannels[i]);
GLES20.glUniform1i(sTextureLocation, i);
}
float _iChannelResolutions[] = new float[iChannelResolutions.length * 3];
for (int i = 0; i < iChannelResolutions.length; i++) {
_iChannelResolutions[i*3] = iChannelResolutions[i][0];
_iChannelResolutions[i*3 + 1] = iChannelResolutions[i][1];
_iChannelResolutions[i*3 + 2] = 1.0f;
}
int iChannelResolutionLocation = GLES20.glGetUniformLocation(program, "iChannelResolution");
GLES20.glUniform3fv(iChannelResolutionLocation,
_iChannelResolutions.length, FloatBuffer.wrap(_iChannelResolutions));
}
public static void release() {
PROGRAM = 0;
CAMERA_RENDER_BUF = null;
}
}
I'm still getting an error:
E/AndroidRuntime: FATAL EXCEPTION: Thread-1759 Process: giri.com.camerafilter, PID: 21244
java.lang.NullPointerException: Attempt to invoke virtual method 'void giri.com.camerafilter.RenderBuffer.unbind()' on a null object reference
at giri.com.camerafilter.filter.CameraFilter.draw(CameraFilter.java:126)
at giri.com.camerafilter.CameraRenderer.run(CameraRenderer.java:165)
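As for this follow-up NPE: it suggests that CameraFilter.release() nulls the shared static CAMERA_RENDER_BUF (it is called from onSurfaceTextureDestroyed()) while the render thread is still inside draw(). A hedged guard, one possible fix rather than the project's official one, is to hold a local reference for the duration of the draw:

// Hedged sketch for CameraFilter.draw(): copy the shared buffer into a local
// so a concurrent release() cannot null it between bind() and unbind().
final RenderBuffer buf = CAMERA_RENDER_BUF;
if (buf == null) return; // released mid-frame; skip drawing this frame
buf.bind();
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
buf.unbind();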
You are getting a ClassCastException because you're trying to cast a TextView to a TextureView. Change:
<TextView
android:id="#+id/textureView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
/>
to <TextureView...>:
<TextureView
android:id="#+id/textureView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
/>
There is a ClassCastException, which means you are casting one class to another class with a different signature. You used an AppCompatTextView in your XML and you are casting it to a TextView.
You might be doing this:
AppCompatTextView textView = (TextView) findViewById(R.id.your_id);
and you should do it as follows:
AppCompatTextView textView = (AppCompatTextView) findViewById(R.id.your_id);
I am using a FrameLayout to implement the swipe gesture for each of my CardViews, but the cards do not always dismiss, and sometimes they just hang to the left or the right until the user swipes the card a second time.
How can I fix this?
MyFrameLayout:
public class MyFrameLayout extends FrameLayout {
private static int mWidth = 200;
MyFrameLayout touchFrameLayout;
// Constructors
// i call "initialize(context)" in all of them
private void initialize(Context context) {
setOnTouchListener(mTouchListener);
touchFrameLayout = this;
}
private float mDisplacementX;
private float mDisplacementY;
private float mInitialTx;
private boolean mTracking;
private OnTouchListener mTouchListener = new OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent event) {
mWidth = (int) touchFrameLayout.getLayoutParams().width;
switch (event.getActionMasked()) {
case MotionEvent.ACTION_DOWN:
mDisplacementX = event.getRawX();
mDisplacementY = event.getRawY();
mInitialTx = getTranslationX();
return true;
case MotionEvent.ACTION_MOVE:
// get the delta distance in X and Y direction
float deltaX = event.getRawX() - mDisplacementX;
float deltaY = event.getRawY() - mDisplacementY;
// updatePressedState(false);
// set the touch and cancel event
if ((Math.abs(deltaX) > ViewConfiguration.get(getContext())
.getScaledTouchSlop() * 2 && Math.abs(deltaY) < Math
.abs(deltaX) / 2)
|| mTracking) {
mTracking = true;
if (getTranslationX() <= mWidth / 2
&& getTranslationX() >= -(mWidth / 2)) {
setTranslationX(mInitialTx + deltaX);
return true;
}
}
break;
case MotionEvent.ACTION_UP:
if (mTracking) {
mTracking = false;
float currentTranslateX = getTranslationX();
if (currentTranslateX > (mWidth/10)) {
rightSwipe();
} else if (currentTranslateX < -(mWidth*10)) {
leftSwipe();
}
// comment this line if you don't want your frame layout to
// take its original position after releasing the touch
setTranslationX(0);
return true;
} else {
// handle click event
setTranslationX(0);
}
break;
}
return false;
}
};
private void rightSwipe() {
Toast.makeText(this.getContext(), "Swipe to the right",
Toast.LENGTH_SHORT).show();
DeleteCard();
}
private void leftSwipe() {
Toast.makeText(this.getContext(), "Swipe to the left",
Toast.LENGTH_SHORT).show();
DeleteCard();
}
}
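For the cards that hang mid-swipe, one hedged option (using the fields above; dismiss() is a hypothetical helper, and DeleteCard() is the existing removal hook) is to animate the card off-screen when a swipe completes, instead of snapping back with setTranslationX(0):

// Hedged sketch: animate a completed swipe off-screen, then delete the card.
private void dismiss(final boolean toRight) {
    animate().translationX(toRight ? mWidth : -mWidth)
            .alpha(0f)
            .setDuration(200)
            .withEndAction(new Runnable() {
                @Override
                public void run() {
                    DeleteCard(); // existing removal logic
                }
            });
}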
Xml:
<RelativeLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<com.example.testing.android.layout.MyFrameLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="wrap_content">
<android.support.v7.widget.CardView
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:card_view="http://schemas.android.com/apk/res-auto"
android:id="#+id/taskViewContainer"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_gravity="center"
app:cardElevation="8dp"
app:cardPreventCornerOverlap="false"
card_view:cardCornerRadius="8dp">
</android.support.v7.widget.CardView>
</com.example.testing.android.layout.MyFrameLayout>
</RelativeLayout>
I adapted romannurik's Android-SwipeToDismiss to do exactly that.
The code is on GitHub with a working sample application, and consists of:
A subclass of RecyclerView.OnItemTouchListener that listens to touch events and detects when an item is being swiped, animating it accordingly.
A SwipeListener that is called to determine whether an item can be dismissed, and called again when items are dismissed.
To use it, copy the class SwipeableRecyclerViewTouchListener into your project and use it like this:
mAdapter = new CardViewAdapter(mItems);
mRecyclerView = (RecyclerView) findViewById(R.id.recycler_view);
mRecyclerView.setLayoutManager(new LinearLayoutManager(this));
mRecyclerView.setAdapter(mAdapter);
SwipeableRecyclerViewTouchListener swipeTouchListener =
new SwipeableRecyclerViewTouchListener(mRecyclerView,
new SwipeableRecyclerViewTouchListener.SwipeListener() {
@Override
public boolean canSwipe(int position) {
return true;
}
@Override
public void onDismissedBySwipeLeft(RecyclerView recyclerView, int[] reverseSortedPositions) {
for (int position : reverseSortedPositions) {
mItems.remove(position);
mAdapter.notifyItemRemoved(position);
}
mAdapter.notifyDataSetChanged();
}
@Override
public void onDismissedBySwipeRight(RecyclerView recyclerView, int[] reverseSortedPositions) {
for (int position : reverseSortedPositions) {
mItems.remove(position);
mAdapter.notifyItemRemoved(position);
}
mAdapter.notifyDataSetChanged();
}
});
mRecyclerView.addOnItemTouchListener(swipeTouchListener);
This can also be useful: http://www.getgoodcode.com/2013/06/swipe-to-dismiss-google-style/
Hope it works for you!