Logcat error — can't find the cause (Java)
This is my main activity; it is the code for a camera filter. I can't find any error in the code, but when I run it, it crashes on my real device. Can anyone help me find where the error is and what it relates to? I am attaching the logcat error below — please take a look.
public class MainActivity extends AppCompatActivity {
private static final int REQUEST_CAMERA_PERSIMMISON = 101;
private CameraRenderer renderer;
private TextureView textureView;
private int filterId = R.id.filter0;
/**
* ATTENTION: This was auto-generated to implement the App Indexing API.
* See https://g.co/AppIndexing/AndroidStudio for more information.
*/
private GoogleApiClient client;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setTitle("Original");
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager
.PERMISSION_GRANTED) {
if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) {
Toast.makeText(this, "Camera acess is required.", Toast.LENGTH_SHORT).show();
} else {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERSIMMISON);
}
} else {
setupCameraPreviewView();
}
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
client = new GoogleApiClient.Builder(this).addApi(AppIndex.API).build();
}
void setupCameraPreviewView() {
renderer = new CameraRenderer(this);
textureView = (TextureView) findViewById(R.id.textureView);
assert textureView != null;
textureView.setSurfaceTextureListener(renderer);
textureView.setOnTouchListener(new View.OnTouchListener() {
#Override
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
renderer.setSelectedFilter(R.id.filter0);
break;
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_CANCEL:
renderer.setSelectedFilter(filterId);
break;
}
return true;
}
});
textureView.addOnLayoutChangeListener(new View.OnLayoutChangeListener() {
#Override
public void onLayoutChange(View v, int left, int top, int right, int bottom, int oldLeft, int oldTop, int oldRight, int oldBottom) {
renderer.onSurfaceTextureSizeChanged(null, v.getWidth(), v.getHeight());
}
});
}
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.filter, menu);
return true;
}
public boolean onOptionsItemSelected(MenuItem item) {
filterId = item.getItemId();
if (filterId == R.id.capture) {
Toast.makeText(this, capture() ? "The capture has been saved to your sdcard root path." : "Saved failed", Toast.LENGTH_SHORT).show();
return true;
}
setTitle(item.getTitle());
if (renderer != null)
renderer.setSelectedFilter(filterId);
return true;
}
private boolean capture() {
String mPath = genSaveFileName(getTitle().toString() + "_", ".png");
File imageFile = new File(mPath);
if (imageFile.exists()) {
imageFile.delete();
}
Bitmap bitmap = textureView.getBitmap();
OutputStream outputStream = null;
try {
outputStream = new FileOutputStream(imageFile);
bitmap.compress(Bitmap.CompressFormat.PNG, 90, outputStream);
outputStream.flush();
outputStream.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
return false;
} catch (IOException e) {
e.printStackTrace();
return false;
}
return true;
}
private String genSaveFileName(String prefix, String suffix) {
Date date = new Date();
SimpleDateFormat dateFormat1 = new SimpleDateFormat("yyyyMMdd_hhmmss");
String timeString = dateFormat1.format(date);
String externalPath = Environment.getExternalStorageDirectory().toString();
return externalPath + "/" + prefix + timeString + suffix;
}
#Override
public void onStart() {
super.onStart();
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
client.connect();
Action viewAction = Action.newAction(
Action.TYPE_VIEW, // TODO: choose an action type.
"Main Page", // TODO: Define a title for the content shown.
// TODO: If you have web page content that matches this app activity's content,
// make sure this auto-generated web page URL is correct.
// Otherwise, set the URL to null.
Uri.parse("http://host/path"),
// TODO: Make sure this auto-generated app URL is correct.
Uri.parse("android-app://giri.com.camerafilter/http/host/path")
);
AppIndex.AppIndexApi.start(client, viewAction);
}
#Override
public void onStop() {
super.onStop();
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
Action viewAction = Action.newAction(
Action.TYPE_VIEW, // TODO: choose an action type.
"Main Page", // TODO: Define a title for the content shown.
// TODO: If you have web page content that matches this app activity's content,
// make sure this auto-generated web page URL is correct.
// Otherwise, set the URL to null.
Uri.parse("http://host/path"),
// TODO: Make sure this auto-generated app URL is correct.
Uri.parse("android-app://giri.com.camerafilter/http/host/path")
);
AppIndex.AppIndexApi.end(client, viewAction);
client.disconnect();
}
}
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    >
    <!-- Must be a TextureView: MainActivity casts findViewById(R.id.textureView)
         to TextureView. Declaring a TextView here causes the ClassCastException
         crash. match_parent so the camera preview fills the screen. -->
    <TextureView
        android:id="@+id/textureView"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        />
</RelativeLayout>
This is my CameraRenderer.java:
public class CameraRenderer extends Thread implements TextureView.SurfaceTextureListener {
private static final String TAG = "CameraRenderer";
private static final int EGL_OPENGL_ES2_BIT = 4;
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
private static final int DRAW_INTERVAL = 1000 / 30;
private Context context;
private SurfaceTexture surfaceTexture;
private int gwidth, gheight;
private EGLDisplay eglDisplay;
private EGLSurface eglSurface;
private EGLContext eglContext;
private EGL10 egl10;
private Camera camera;
private SurfaceTexture cameraSurfaceTexture;
private int cameraTextureId;
private CameraFilter selectedFilter;
private SparseArray<CameraFilter> cameraFilterMap = new SparseArray<>();
public CameraRenderer(Context context) {
this.context = context;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
GLES20.glViewport(0, 0, gwidth = width, gheight = height);
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
if (camera != null) {
camera.stopPreview();
camera.release();
}
interrupt();
CameraFilter.release();
return true;
}
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
if (isAlive()) {
interrupt();
}
surfaceTexture = surface;
GLES20.glViewport(0, 0, gwidth = width, gheight = height);
// Open camera
Pair<Camera.CameraInfo, Integer> backCamera = getBackCamera();
final int backCameraId = backCamera.second;
camera = Camera.open(backCameraId);
// Start rendering
start();
}
public void setSelectedFilter(int id) {
selectedFilter = cameraFilterMap.get(id);
selectedFilter.onAttach();
}
#Override
public void run() {
initGL(surfaceTexture);
// Setup camera filters map
cameraFilterMap.append(R.id.filter0, new OriginalFilter(context));
cameraFilterMap.append(R.id.filter1, new EdgeDetectionFilter(context));
cameraFilterMap.append(R.id.filter2, new PixelizeFilter(context));
cameraFilterMap.append(R.id.filter3, new EMInterferenceFilter(context));
cameraFilterMap.append(R.id.filter4, new TrianglesMosaicFilter(context));
cameraFilterMap.append(R.id.filter5, new LegofiedFilter(context));
cameraFilterMap.append(R.id.filter6, new TileMosaicFilter(context));
cameraFilterMap.append(R.id.filter7, new BlueorangeFilter(context));
cameraFilterMap.append(R.id.filter8, new ChromaticAberrationFilter(context));
cameraFilterMap.append(R.id.filter9, new BasicDeformFilter(context));
cameraFilterMap.append(R.id.filter10, new ContrastFilter(context));
cameraFilterMap.append(R.id.filter11, new NoiseWarpFilter(context));
cameraFilterMap.append(R.id.filter12, new RefractionFilter(context));
cameraFilterMap.append(R.id.filter13, new MappingFilter(context));
cameraFilterMap.append(R.id.filter14, new CrosshatchFilter(context));
cameraFilterMap.append(R.id.filter15, new LichtensteinEsqueFilter(context));
cameraFilterMap.append(R.id.filter16, new AsciiArtFilter(context));
cameraFilterMap.append(R.id.filter17, new MoneyFilter(context));
cameraFilterMap.append(R.id.filter18, new CrackedFilter(context));
cameraFilterMap.append(R.id.filter19, new PolygonizationFilter(context));
cameraFilterMap.append(R.id.filter20, new JFAVoronoiFilter(context));
setSelectedFilter(R.id.filter0);
// Create texture for camera preview
cameraTextureId = MyGLUtils.genTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
cameraSurfaceTexture = new SurfaceTexture(cameraTextureId);
// Start camera preview
try {
camera.setPreviewTexture(cameraSurfaceTexture);
camera.startPreview();
} catch (IOException ioe) {
// Something bad happened
}
// Render loop
while (!Thread.currentThread().isInterrupted()) {
try {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Update the camera preview texture
synchronized (this) {
cameraSurfaceTexture.updateTexImage();
}
// Draw camera preview
selectedFilter.draw(cameraTextureId, gwidth, gheight);
// Flush
GLES20.glFlush();
egl10.eglSwapBuffers(eglDisplay, eglSurface);
Thread.sleep(DRAW_INTERVAL);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
cameraSurfaceTexture.release();
GLES20.glDeleteTextures(1, new int[]{cameraTextureId}, 0);
}
private void initGL(SurfaceTexture texture) {
egl10 = (EGL10) EGLContext.getEGL();
eglDisplay = egl10.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
throw new RuntimeException("eglGetDisplay failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
}
int[] version = new int[2];
if (!egl10.eglInitialize(eglDisplay, version)) {
throw new RuntimeException("eglInitialize failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
}
int[] configsCount = new int[1];
EGLConfig[] configs = new EGLConfig[1];
int[] configSpec = {
EGL10.EGL_RENDERABLE_TYPE,
EGL_OPENGL_ES2_BIT,
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_ALPHA_SIZE, 8,
EGL10.EGL_DEPTH_SIZE, 0,
EGL10.EGL_STENCIL_SIZE, 0,
EGL10.EGL_NONE
};
EGLConfig eglConfig = null;
if (!egl10.eglChooseConfig(eglDisplay, configSpec, configs, 1, configsCount)) {
throw new IllegalArgumentException("eglChooseConfig failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
} else if (configsCount[0] > 0) {
eglConfig = configs[0];
}
if (eglConfig == null) {
throw new RuntimeException("eglConfig not initialized");
}
int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
eglContext = egl10.eglCreateContext(eglDisplay, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
eglSurface = egl10.eglCreateWindowSurface(eglDisplay, eglConfig, texture, null);
if (eglSurface == null || eglSurface == EGL10.EGL_NO_SURFACE) {
int error = egl10.eglGetError();
if (error == EGL10.EGL_BAD_NATIVE_WINDOW) {
Log.e(TAG, "eglCreateWindowSurface returned EGL10.EGL_BAD_NATIVE_WINDOW");
return;
}
throw new RuntimeException("eglCreateWindowSurface failed " + android.opengl.GLUtils.getEGLErrorString(error));
}
if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
throw new RuntimeException("eglMakeCurrent failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
}
}
private Pair<Camera.CameraInfo, Integer> getBackCamera() {
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
final int numberOfCameras = Camera.getNumberOfCameras();
for (int i = 0; i < numberOfCameras; ++i) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
return new Pair<>(cameraInfo, i);
}
}
return null;
}
}
**This is my CameraFilter.java file:**
public abstract class CameraFilter {
static final float SQUARE_COORDS[] = {
1.0f, -1.0f,
-1.0f, -1.0f,
1.0f, 1.0f,
-1.0f, 1.0f,
};
static final float TEXTURE_COORDS[] = {
1.0f, 0.0f,
0.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f,
};
static FloatBuffer VERTEX_BUF, TEXTURE_COORD_BUF;
static int PROGRAM = 0;
private static final int BUF_ACTIVE_TEX_UNIT = GLES20.GL_TEXTURE8;
private static RenderBuffer CAMERA_RENDER_BUF;
private static final float ROATED_TEXTURE_COORDS[] = {
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 0.0f,
0.0f, 1.0f,
};
private static FloatBuffer ROATED_TEXTURE_COORD_BUF;
final long START_TIME = System.currentTimeMillis();
int iFrame = 0;
public CameraFilter(Context context) {
// Setup default Buffers
if (VERTEX_BUF == null) {
VERTEX_BUF = ByteBuffer.allocateDirect(SQUARE_COORDS.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
VERTEX_BUF.put(SQUARE_COORDS);
VERTEX_BUF.position(0);
}
if (TEXTURE_COORD_BUF == null) {
TEXTURE_COORD_BUF = ByteBuffer.allocateDirect(TEXTURE_COORDS.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
TEXTURE_COORD_BUF.put(TEXTURE_COORDS);
TEXTURE_COORD_BUF.position(0);
}
if (ROATED_TEXTURE_COORD_BUF == null) {
ROATED_TEXTURE_COORD_BUF = ByteBuffer.allocateDirect(ROATED_TEXTURE_COORDS.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
ROATED_TEXTURE_COORD_BUF.put(ROATED_TEXTURE_COORDS);
ROATED_TEXTURE_COORD_BUF.position(0);
}
if (PROGRAM == 0) {
PROGRAM = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.original_rtt);
}
}
#CallSuper
public void onAttach() {
iFrame = 0;
}
final public void draw(int cameraTexId, int canvasWidth, int canvasHeight) {
// TODO move?
// Create camera render buffer
if (CAMERA_RENDER_BUF == null ||
CAMERA_RENDER_BUF.getWidth() != canvasWidth ||
CAMERA_RENDER_BUF.getHeight() != canvasHeight) {
CAMERA_RENDER_BUF = new RenderBuffer(canvasWidth, canvasHeight, BUF_ACTIVE_TEX_UNIT);
}
// Use shaders
GLES20.glUseProgram(PROGRAM);
int iChannel0Location = GLES20.glGetUniformLocation(PROGRAM, "iChannel0");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexId);
GLES20.glUniform1i(iChannel0Location, 0);
int vPositionLocation = GLES20.glGetAttribLocation(PROGRAM, "vPosition");
GLES20.glEnableVertexAttribArray(vPositionLocation);
GLES20.glVertexAttribPointer(vPositionLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, VERTEX_BUF);
int vTexCoordLocation = GLES20.glGetAttribLocation(PROGRAM, "vTexCoord");
GLES20.glEnableVertexAttribArray(vTexCoordLocation);
GLES20.glVertexAttribPointer(vTexCoordLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, ROATED_TEXTURE_COORD_BUF);
// Render to texture
CAMERA_RENDER_BUF.bind();
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
CAMERA_RENDER_BUF.unbind();
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
onDraw(CAMERA_RENDER_BUF.getTexId(), canvasWidth, canvasHeight);
iFrame ++;
}
abstract void onDraw(int cameraTexId, int canvasWidth, int canvasHeight);
void setupShaderInputs(int program, int[] iResolution, int[] iChannels, int[][] iChannelResolutions) {
setupShaderInputs(program, VERTEX_BUF, TEXTURE_COORD_BUF, iResolution, iChannels, iChannelResolutions);
}
void setupShaderInputs(int program, FloatBuffer vertex, FloatBuffer textureCoord, int[] iResolution, int[] iChannels, int[][] iChannelResolutions) {
GLES20.glUseProgram(program);
int iResolutionLocation = GLES20.glGetUniformLocation(program, "iResolution");
GLES20.glUniform3fv(iResolutionLocation, 1,
FloatBuffer.wrap(new float[]{(float) iResolution[0], (float) iResolution[1], 1.0f}));
float time = ((float) (System.currentTimeMillis() - START_TIME)) / 1000.0f;
int iGlobalTimeLocation = GLES20.glGetUniformLocation(program, "iGlobalTime");
GLES20.glUniform1f(iGlobalTimeLocation, time);
int iFrameLocation = GLES20.glGetUniformLocation(program, "iFrame");
GLES20.glUniform1i(iFrameLocation, iFrame);
int vPositionLocation = GLES20.glGetAttribLocation(program, "vPosition");
GLES20.glEnableVertexAttribArray(vPositionLocation);
GLES20.glVertexAttribPointer(vPositionLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, vertex);
int vTexCoordLocation = GLES20.glGetAttribLocation(program, "vTexCoord");
GLES20.glEnableVertexAttribArray(vTexCoordLocation);
GLES20.glVertexAttribPointer(vTexCoordLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, textureCoord);
for (int i = 0; i < iChannels.length; i ++) {
int sTextureLocation = GLES20.glGetUniformLocation(program, "iChannel" + i);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, iChannels[i]);
GLES20.glUniform1i(sTextureLocation, i);
}
float _iChannelResolutions[] = new float[iChannelResolutions.length * 3];
for (int i = 0; i < iChannelResolutions.length; i++) {
_iChannelResolutions[i*3] = iChannelResolutions[i][0];
_iChannelResolutions[i*3 + 1] = iChannelResolutions[i][1];
_iChannelResolutions[i*3 + 2] = 1.0f;
}
int iChannelResolutionLocation = GLES20.glGetUniformLocation(program, "iChannelResolution");
GLES20.glUniform3fv(iChannelResolutionLocation,
_iChannelResolutions.length, FloatBuffer.wrap(_iChannelResolutions));
}
public static void release() {
PROGRAM = 0;
CAMERA_RENDER_BUF = null;
}
}
Still getting an error
E/AndroidRuntime: FATAL EXCEPTION: Thread-1759 Process: giri.com.camerafilter, PID: 21244
java.lang.NullPointerException: Attempt to invoke virtual method 'void giri.com.camerafilter.RenderBuffer.unbind()' on a null object reference
at giri.com.camerafilter.filter.CameraFilter.draw(CameraFilter.java:126)
at giri.com.camerafilter.CameraRenderer.run(CameraRenderer.java:165)
You are getting a ClassCastException because you're trying to cast a TextView to a TextureView. Change:
<TextView
android:id="#+id/textureView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
/>
to <TextureView...>:
<TextureView
android:id="#+id/textureView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
/>
There is a ClassCastException, which means you are casting one class to another class with a different signature. You have used AppCompatTextView in your XML and you are casting it to TextView.
you might be doing this
AppCompatTextView textView = (TextView) findViewById(R.id.your_id);
and you should do as follows
AppCompatTextView textView = (AppCompatTextView) findViewById(R.id.your_id);
Related
Not able to zoom while video recording using media recorder android
I want to implement zoom function while video recording in android.But, I am not able to access Camera parameters or Camera startSmoothZoom() method in neither main activity nor surface class. If you access camera parametera in media recorder method(prepareMediaRecorder()) it will throw null pointer exception. this activity class- in prepareMediaRecorder() method not able to access camera parameters and also not able to set startSmoothZoom(). here camera object giving null pointer exception. public class CustomCameraPreview extends BaseActivity implements OnClickListener, AlertPositiveListener, OrientationListener, ActivityCompat.OnRequestPermissionsResultCallback { RelativeLayout mLayout; MediaRecorder mediaRecorder; private PictureCallback mPictureCallback = new PictureCallback() { #Override public void onPictureTaken(byte[] data, Camera camera) { try { cameraData = data; captureAngle = getRotation(); mBitmap = BitmapFactory.decodeByteArray(data, 0, data.length); } catch (OutOfMemoryError e){ System.gc(); mBitmap = BitmapFactory.decodeByteArray(data, 0, data.length); } // int rotation=getRotation(); Matrix matrix = new Matrix(); matrix.postRotate(getRotation()); /*mBitmap = Bitmap.createBitmap(mBitmap, 0, 0, mBitmap.getWidth(), mBitmap.getHeight(), matrix, true);*/ mBitmap = Bitmap.createBitmap(mBitmap, 0, 0, mBitmap.getWidth(), mBitmap.getHeight(), matrix, false); if (mBitmap != null) { mButtonRetake.setEnabled(true); } else { Message.displayToast(CustomCameraPreview.this, getString(R.string.picture_not_taken)); } mCamera.release(); mButtonCapture.setEnabled(false); } }; protected void onCreate(){ initiCameraForVideo(); } private void initiCameraForVideo() { params = mCamera.getParameters(); mButtonCapture.setBackgroundResource(R.drawable.videostart); mShowCamera = new CameraSurfaceHolder(CustomCameraPreview.this, mCamera); mLayout = (RelativeLayout) findViewById(R.id.relativeLayout); mLayout.removeAllViews(); mLayout.addView(mShowCamera); List<Camera.Size> 
mSizeList_Video = null;// params.getSupportedPreviewSizes(); if (params.getSupportedVideoSizes() != null) { mSizeList_Video = params.getSupportedVideoSizes(); } else { // Video sizes may be null, which indicates that all the // supported preview sizes are supported for video // recording. mSizeList_Video = mCamera.getParameters() .getSupportedPreviewSizes(); } } #Override public void onClick(View v) { int viewId = v.getId(); switch (viewId) { case R.id.button_Capture: releaseCamera(); if (!prepareMediaRecorder()) { Message.displayToast( CustomCameraPreview.this, getString(R.string.somethign_went_wrong)); } else { mediaRecorder.start(); recording = true; } } private boolean prepareMediaRecorder() \*method to setup media player to record video { mCamera = isCameraAvailable(); mediaRecorder = new MediaRecorder(); mCamera.unlock(); mediaRecorder.setCamera(mCamera); mediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER); mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA); if(CamcorderProfile.hasProfile(findCameraID(), CamcorderProfile.QUALITY_480P)) { mediaRecorder.setProfile(CamcorderProfile .get(CamcorderProfile.QUALITY_480P)); }else{ mediaRecorder.setProfile(CamcorderProfile .get(CamcorderProfile.QUALITY_LOW)); } mediaRecorder.setOutputFile(getOutputVideoFile()); mediaRecorder.setMaxDuration(60000); // mediaRecorder.setMaxFileSize(100 * 1000 * 1000); mediaRecorder.setPreviewDisplay(mShowCamera.getHolder().getSurface()); try { mediaRecorder.prepare(); } } #Override protected void onPause() { super.onPause(); releaseMediaRecorder(); releaseCamera(); } private void releaseCamera() { if (mCamera != null) { mCamera.release(); mCamera = null; } } and this surface preview class- public class CameraSurfaceHolder extends SurfaceView implements SurfaceHolder.Callback { private static final String TAG = "CameraSurfaceHolder"; Context mContext; private String errorMessage = ""; private SurfaceHolder mSurfaceHolder; private Camera mCamera; public 
CameraSurfaceHolder(Context context, Camera camera) { super(context); mContext = context; mCamera = camera; mSurfaceHolder = getHolder(); mSurfaceHolder.addCallback(this); } #Override public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { /*if (holder.getSurface() == null){ // preview surface does not exist return; } // stop preview before making changes try { mCamera.stopPreview(); } catch (Exception e){ // ignore: tried to stop a non-existent preview } // set preview size and make any resize, rotate or // reformatting changes here setCameraDisplayOrientation((Activity)mContext, Camera.CameraInfo.CAMERA_FACING_BACK, mCamera); // start preview with new settings try { mCamera.setPreviewDisplay(holder); mCamera.startPreview(); } */ } #Override public void surfaceCreated(SurfaceHolder holder) { try { mCamera.setPreviewDisplay(mSurfaceHolder); mCamera.startPreview(); } catch (Exception e) { Logger.ex(e); } } public void setCameraDisplayOrientation(Activity activity, int cameraId, Camera camera) { Camera.CameraInfo info = new Camera.CameraInfo(); Camera.getCameraInfo(cameraId, info); /*Display mDisplay = ((WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay(); int rotation = mDisplay.getDisplayId();*/ int rotation = activity.getWindowManager().getDefaultDisplay() .getRotation(); int degrees = 0; switch (rotation) { case Surface.ROTATION_0: degrees = 0; break; case Surface.ROTATION_90: degrees = 90; break; case Surface.ROTATION_180: degrees = 180; break; case Surface.ROTATION_270: degrees = 270; break; } int result; if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { result = (info.orientation + degrees) % 360; result = (360 - result) % 360; // compensate the mirror } else { // back-facing result = (info.orientation - degrees + 360) % 360; } Camera.Parameters mParameters = camera.getParameters(); mParameters.setRotation(rotation); camera.setDisplayOrientation(result); 
camera.setParameters(mParameters); } #Override public void surfaceDestroyed(SurfaceHolder holder) { } } in above class I added some code like below and when user touch on camera preview its throwing null pointer exception in onTouchEvent() on access of camera paramters. Also tried like I set again camera object to surface in activity after configure media recorder(prepareMediaRecorder()), but zoom function working but video is not recording. #Override public boolean onTouchEvent(MotionEvent event) { // Get the pointer ID Camera.Parameters params = mCamera.getParameters(); int action = event.getAction(); if (event.getPointerCount() > 1) { // handle multi-touch events if (action == MotionEvent.ACTION_POINTER_DOWN) { mDist = getFingerSpacing(event); } else if (action == MotionEvent.ACTION_MOVE && params.isZoomSupported()) { mCamera.cancelAutoFocus(); handleZoom(event, params); } } else { // handle single touch events if (action == MotionEvent.ACTION_UP) { handleFocus(event, params); } } return true; } private void handleZoom(MotionEvent event, Camera.Parameters params) { int maxZoom = params.getMaxZoom(); int zoom = params.getZoom(); float newDist = getFingerSpacing(event); if (newDist > mDist) { //zoom in if (zoom < maxZoom) zoom++; } else if (newDist < mDist) { //zoom out if (zoom > 0) zoom--; } mDist = newDist; params.setZoom(zoom); mCamera.setParameters(params); } public void handleFocus(MotionEvent event, Camera.Parameters params) { int pointerId = event.getPointerId(0); int pointerIndex = event.findPointerIndex(pointerId); // Get the pointer's current position float x = event.getX(pointerIndex); float y = event.getY(pointerIndex); List<String> supportedFocusModes = params.getSupportedFocusModes(); if (supportedFocusModes != null && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) { mCamera.autoFocus(new Camera.AutoFocusCallback() { #Override public void onAutoFocus(boolean b, Camera camera) { // currently set to auto-focus on single touch } }); } 
} /** Determine the space between the first two fingers */ private float getFingerSpacing(MotionEvent event) { // ... float x = event.getX(0) - event.getX(1); float y = event.getY(0) - event.getY(1); return (float)Math.sqrt(x * x + y * y); }
Trouble getting order of Image Bitmap layers in Android correct
I have a piece of code that compares to images and places a marker on the difference. So far it works well, except the latest marker layer that is added always shows underneath all the older markers. I have the latest one as a yellow color and the older ones as red. When the difference is close to one of the red markers, the yellow marker shows behind those ones. Is there anyone that can help me get the yellow (Latest marker) to appear on top? This is my code so far: public class CheckmarkActivity extends AppCompatActivity implements ZoomLayout.OnZoomableLayoutClickEventListener { TextView tv; RelativeLayout relativeLayout_work; ImageView imageViewtest; Bitmap prevBmp = null; Timer t; TimerTask task; int time = 100; float image_Width; float image_Height; #Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_checkmark); if (getResources().getBoolean(R.bool.is_tablet)) { setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); } tv = findViewById(R.id.tv); relativeLayout_work = findViewById(R.id.relativeLayout_work); imageViewtest = findViewById(R.id.imageViewtest); prevBmp = ViewcontrollerActivity.workSession.getLastScreenShot(); if (prevBmp == null || ViewcontrollerActivity.workSession.workScreenShot == null) { setResult(Activity.RESULT_CANCELED); finish(); } startTimer(); } // image compare class TestAsync extends AsyncTask<Object, Integer, String> { String TAG = getClass().getSimpleName(); PointF p; Bitmap test_3; protected void onPreExecute (){ super.onPreExecute(); Log.d(TAG + " PreExceute","On pre Exceute......"); } protected String doInBackground(Object...arg0) { test_3 = ImageHelper.findDifference(CheckmarkActivity.this, prevBmp, ViewcontrollerActivity.workSession.workScreenShot); p = ImageHelper.findShot(test_3); time = 1; return "You are at PostExecute"; } protected void onProgressUpdate(Integer...a){ super.onProgressUpdate(a); } protected void onPostExecute(String 
result) { super.onPostExecute(result); addImageToImageview(); PointF np = Session.convertPointBitmap2View(p, relativeLayout_work, ViewcontrollerActivity.workSession.workScreenShot); tv.setX(np.x - tv.getWidth() / 2); tv.setY(np.y - tv.getHeight() / 2); tv.setVisibility(View.VISIBLE); // imageViewtest.setImageBitmap(test_3); } } private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) { #Override public void onManagerConnected(int status) { switch (status) { case LoaderCallbackInterface.SUCCESS: { Log.i("OpenCV", "OpenCV loaded successfully"); new TestAsync().execute(); } break; default: { super.onManagerConnected(status); } break; } } }; #Override protected void onResume() { super.onResume(); if (!OpenCVLoader.initDebug()) { Log.d("OpenCV", "Internal OpenCV library not found. Using OpenCV Manager for initialization"); OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback); } else { Log.d("OpenCV", "OpenCV library found inside package. Using it!"); mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS); } } public static int[] getBitmapOffset(ImageView img, Boolean includeLayout) { int[] offset = new int[2]; float[] values = new float[9]; Matrix m = img.getImageMatrix(); m.getValues(values); offset[0] = (int) values[5]; offset[1] = (int) values[2]; if (includeLayout) { ViewGroup.MarginLayoutParams lp = (ViewGroup.MarginLayoutParams) img.getLayoutParams(); int paddingTop = (int) (img.getPaddingTop() ); int paddingLeft = (int) (img.getPaddingLeft() ); offset[0] += paddingTop + lp.topMargin; offset[1] += paddingLeft + lp.leftMargin; } return offset; } public static int[] getBitmapPositionInsideImageView(ImageView imageView) { int[] ret = new int[4]; if (imageView == null || imageView.getDrawable() == null) return ret; // Get image dimensions // Get image matrix values and place them in an array float[] f = new float[9]; imageView.getImageMatrix().getValues(f); // Extract the scale values using the constants (if 
aspect ratio maintained, scaleX == scaleY) final float scaleX = f[Matrix.MSCALE_X]; final float scaleY = f[Matrix.MSCALE_Y]; // Get the drawable (could also get the bitmap behind the drawable and getWidth/getHeight) final Drawable d = imageView.getDrawable(); final int origW = d.getIntrinsicWidth(); final int origH = d.getIntrinsicHeight(); // Calculate the actual dimensions final int actW = Math.round(origW * scaleX); final int actH = Math.round(origH * scaleY); ret[2] = actW; ret[3] = actH; // Get image position // We assume that the image is centered into ImageView int imgViewW = imageView.getWidth(); int imgViewH = imageView.getHeight(); int top = (int) (imgViewH - actH)/2; int left = (int) (imgViewW - actW)/2; ret[0] = left; ret[1] = top; return ret; } private void addImageToImageview(){ if (ViewcontrollerActivity.workSession.workScreenShot != null) { imageViewtest.setImageBitmap(ViewcontrollerActivity.workSession.workScreenShot); Log.d("width", String.valueOf(imageViewtest.getWidth())); } Resources r = getResources(); float px = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 20, r.getDisplayMetrics()); for (int i = 0; i < ViewcontrollerActivity.workSession.getShotCount(); i++) { PointF p = ViewcontrollerActivity.workSession.getPoint(i); TextView t = new TextView(this); t.setText("" + (i + 1)); RelativeLayout.LayoutParams param = new RelativeLayout.LayoutParams((int)px, (int)px); relativeLayout_work.addView(t); t.setLayoutParams(param); t.setGravity(Gravity.CENTER); t.setBackgroundResource(R.drawable.circle); p = Session.convertPointBitmap2View(p, relativeLayout_work, ViewcontrollerActivity.workSession.workScreenShot); t.setX(p.x); t.setY(p.y); t.setTag(10000 + i); } } public void onConfirm(View v){ View vv = findViewById(R.id.relativeLayout_work); PointF bp = Session.convertPointView2Bitmap(new PointF(tv.getX(), tv.getY()), relativeLayout_work, ViewcontrollerActivity.workSession.workScreenShot); 
ViewcontrollerActivity.workSession.addNewShot(ViewcontrollerActivity.workSession.workScreenShot, bp); setResult(Activity.RESULT_OK); finish(); } public void onCancel(View v){ setResult(Activity.RESULT_CANCELED); finish(); } #Override public void onBackPressed() { setResult(Activity.RESULT_CANCELED); finish(); } #Override public void OnContentClickEvent(int action, float xR, float yR) { int[] offset = new int[2]; int[] rect = new int[4]; offset = this.getBitmapOffset(imageViewtest, false); int original_width = imageViewtest.getDrawable().getIntrinsicWidth(); int original_height = imageViewtest.getDrawable().getIntrinsicHeight(); rect = getBitmapPositionInsideImageView(imageViewtest); Log.i("OffsetY", String.valueOf(offset[0])); Log.i("OffsetX", String.valueOf(offset[1])); Log.i( "0", String.valueOf(rect[0])); Log.i( "1", String.valueOf(rect[1])); Log.i( "2", String.valueOf(rect[2])); Log.i( "3", String.valueOf(rect[3])); if (xR > rect[0] && xR < rect[0] + rect[2] && yR > rect[1] && yR < rect[1] + rect[3]) { tv.setX(xR - tv.getWidth() / 2); tv.setY(yR - tv.getHeight() / 2); } // tv.setX(xR - tv.getWidth() / 2); // tv.setY(yR - tv.getHeight() / 2); } public void onMoveButtonPressed(View v) { ImageButton b = (ImageButton)v; int mId = b.getId(); switch (mId) { case R.id.imageButtonL: tv.setX(tv.getX() - 1); break; case R.id.imageButtonR: tv.setX(tv.getX() + 1); break; case R.id.imageButtonD: tv.setY(tv.getY() + 1); break; case R.id.imageButtonU: tv.setY(tv.getY() - 1); break; } } //timer change image public void startTimer(){ t = new Timer(); task = new TimerTask() { #Override public void run() { runOnUiThread(new Runnable() { #Override public void run() { if (time == 1){ imageViewtest.setImageBitmap(ViewcontrollerActivity.workSession.workScreenShot); // tv.setVisibility(View.VISIBLE); tv.setText("" + (ViewcontrollerActivity.workSession.getShotCount() + 1)); t.cancel(); return; } if (time % 2 == 0) { imageViewtest.setImageBitmap(prevBmp); } else if(time % 2 == 1){ 
imageViewtest.setImageBitmap(ViewcontrollerActivity.workSession.workScreenShot); } time --; } }); } }; t.scheduleAtFixedRate(task, 0, 500); } }
You can give the z-order of the child view with addView() function. void addView (View child, int index) ex) private void addImageToImageview(){ if (ViewcontrollerActivity.workSession.workScreenShot != null) { imageViewtest.setImageBitmap(ViewcontrollerActivity.workSession.workScreenShot); Log.d("width", String.valueOf(imageViewtest.getWidth())); } Resources r = getResources(); float px = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 20, r.getDisplayMetrics()); int currChildrenCount = relativeLayout_work.getChildCount(); for (int i = 0; i < ViewcontrollerActivity.workSession.getShotCount(); i++) { PointF p = ViewcontrollerActivity.workSession.getPoint(i); TextView t = new TextView(this); t.setText("" + (i + 1)); RelativeLayout.LayoutParams param = new RelativeLayout.LayoutParams((int)px, (int)px); relativeLayout_work.addView(t, currChildrenCount+i); // You can control the order like this t.setLayoutParams(param); t.setGravity(Gravity.CENTER); t.setBackgroundResource(R.drawable.circle); p = Session.convertPointBitmap2View(p, relativeLayout_work, ViewcontrollerActivity.workSession.workScreenShot); t.setX(p.x); t.setY(p.y); t.setTag(10000 + i); } }
How to display a watermark on the camera preview
I'm making a motion detector app on top of this project. I would like to display a watermark image on the camera preview. I tried this method, but it didn't work for me. Please explain in code, how to show the watermark without having any issue for the motion detection part. MotionDetection.java public class MotionDetectionActivity extends SensorsActivity { private static final String TAG = "MotionDetectionActivity"; private static SurfaceView preview = null; private static SurfaceHolder previewHolder = null; private static Camera camera = null; private static boolean inPreview = false; private static long mReferenceTime = 0; private static IMotionDetection detector = null; private static volatile AtomicBoolean processing = new AtomicBoolean(false); /** * {#inheritDoc} */ #Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); preview = (SurfaceView) findViewById(R.id.preview); previewHolder = preview.getHolder(); previewHolder.addCallback(surfaceCallback); previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); if (Preferences.USE_RGB) { detector = new RgbMotionDetection(); } else if (Preferences.USE_LUMA) { detector = new LumaMotionDetection(); } else { // Using State based (aggregate map) detector = new AggregateLumaMotionDetection(); } } /** * {#inheritDoc} */ #Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); } /** * {#inheritDoc} */ #Override public void onPause() { super.onPause(); camera.setPreviewCallback(null); if (inPreview) camera.stopPreview(); inPreview = false; camera.release(); camera = null; } /** * {#inheritDoc} */ #Override public void onResume() { super.onResume(); camera = Camera.open(); } private PreviewCallback previewCallback = new PreviewCallback() { /** * {#inheritDoc} */ #Override public void onPreviewFrame(byte[] data, Camera cam) { if (data == null) return; Camera.Size size = 
cam.getParameters().getPreviewSize(); if (size == null) return; if (!GlobalData.isPhoneInMotion()) { DetectionThread thread = new DetectionThread(data, size.width, size.height); thread.start(); } } }; private SurfaceHolder.Callback surfaceCallback = new SurfaceHolder.Callback() { /** * {#inheritDoc} */ #Override public void surfaceCreated(SurfaceHolder holder) { try { camera.setPreviewDisplay(previewHolder); camera.setPreviewCallback(previewCallback); } catch (Throwable t) { Log.e("PreviewDemo-surfaceCallback", "Exception in setPreviewDisplay()", t); } } /** * {#inheritDoc} */ #Override public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { Camera.Parameters parameters = camera.getParameters(); Camera.Size size = getBestPreviewSize(width, height, parameters); if (size != null) { parameters.setPreviewSize(size.width, size.height); Log.d(TAG, "Using width=" + size.width + " height=" + size.height); } camera.setParameters(parameters); camera.startPreview(); inPreview = true; } /** * {#inheritDoc} */ #Override public void surfaceDestroyed(SurfaceHolder holder) { // Ignore } }; private static Camera.Size getBestPreviewSize(int width, int height, Camera.Parameters parameters) { Camera.Size result = null; for (Camera.Size size : parameters.getSupportedPreviewSizes()) { if (size.width <= width && size.height <= height) { if (result == null) { result = size; } else { int resultArea = result.width * result.height; int newArea = size.width * size.height; if (newArea > resultArea) result = size; } } } return result; } private static final class DetectionThread extends Thread { private byte[] data; private int width; private int height; public DetectionThread(byte[] data, int width, int height) { this.data = data; this.width = width; this.height = height; } /** * {#inheritDoc} */ #Override public void run() { if (!processing.compareAndSet(false, true)) return; // Log.d(TAG, "BEGIN PROCESSING..."); try { // Previous frame int[] pre = null; if 
(Preferences.SAVE_PREVIOUS) pre = detector.getPrevious(); // Current frame (with changes) // long bConversion = System.currentTimeMillis(); int[] img = null; if (Preferences.USE_RGB) { img = ImageProcessing.decodeYUV420SPtoRGB(data, width, height); } else { img = ImageProcessing.decodeYUV420SPtoLuma(data, width, height); } // long aConversion = System.currentTimeMillis(); // Log.d(TAG, "Converstion="+(aConversion-bConversion)); // Current frame (without changes) int[] org = null; if (Preferences.SAVE_ORIGINAL && img != null) org = img.clone(); if (img != null && detector.detect(img, width, height)) { // The delay is necessary to avoid taking a picture while in // the // middle of taking another. This problem can causes some // phones // to reboot. long now = System.currentTimeMillis(); if (now > (mReferenceTime + Preferences.PICTURE_DELAY)) { mReferenceTime = now; Bitmap previous = null; if (Preferences.SAVE_PREVIOUS && pre != null) { if (Preferences.USE_RGB) previous = ImageProcessing.rgbToBitmap(pre, width, height); else previous = ImageProcessing.lumaToGreyscale(pre, width, height); } Bitmap original = null; if (Preferences.SAVE_ORIGINAL && org != null) { if (Preferences.USE_RGB) original = ImageProcessing.rgbToBitmap(org, width, height); else original = ImageProcessing.lumaToGreyscale(org, width, height); } Bitmap bitmap = null; if (Preferences.SAVE_CHANGES) { if (Preferences.USE_RGB) bitmap = ImageProcessing.rgbToBitmap(img, width, height); else bitmap = ImageProcessing.lumaToGreyscale(img, width, height); } Log.i(TAG, "Saving.. 
previous=" + previous + " original=" + original + " bitmap=" + bitmap); Looper.prepare(); new SavePhotoTask().execute(previous, original, bitmap); } else { Log.i(TAG, "Not taking picture because not enough time has passed since the creation of the Surface"); } } } catch (Exception e) { e.printStackTrace(); } finally { processing.set(false); } // Log.d(TAG, "END PROCESSING..."); processing.set(false); } }; private static final class SavePhotoTask extends AsyncTask<Bitmap, Integer, Integer> { /** * {#inheritDoc} */ #Override protected Integer doInBackground(Bitmap... data) { for (int i = 0; i < data.length; i++) { Bitmap bitmap = data[i]; String name = String.valueOf(System.currentTimeMillis()); if (bitmap != null) save(name, bitmap); } return 1; } private void save(String name, Bitmap bitmap) { File photo = new File(Environment.getExternalStorageDirectory(), name + ".jpg"); if (photo.exists()) photo.delete(); try { FileOutputStream fos = new FileOutputStream(photo.getPath()); bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos); fos.close(); } catch (java.io.IOException e) { Log.e("PictureDemo", "Exception in photoCallback", e); } } } } main.xml <?xml version="1.0" encoding="utf-8"?> <SurfaceView xmlns:android="http://schemas.android.com/apk/res/android" android:id="#+id/preview" android:layout_width="fill_parent" android:layout_height="fill_parent"> </SurfaceView>
A workaround option is to overlay the Activity XML file with another XML file which contains the watermark image. To do so: 1) Create a new layout file inside the layout folder, e.g. overlay.xml. 2) Insert an ImageView inside it, something like: <RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android" android:layout_width="match_parent" android:layout_height="match_parent" xmlns:app="http://schemas.android.com/apk/res-auto"> <ImageView android:id="@+id/imageView1" android:layout_centerInParent="true" android:layout_width="wrap_content" android:layout_height="wrap_content" android:src="@drawable/android" /> </RelativeLayout> 3) Then, inside the Activity java file, i.e. MotionDetector.java, create a new method addView(): private void addView() { controlInflater = LayoutInflater.from(getBaseContext()); View viewControl = controlInflater.inflate(R.layout.overlay, null); LayoutParams layoutParamsControl = new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT); this.addContentView(viewControl, layoutParamsControl); } 4) Finally, invoke the addView() method from onCreate() to add the image: super.onCreate(savedInstanceState); setContentView(R.layout.main); preview = (SurfaceView) findViewById(R.id.preview); previewHolder = preview.getHolder(); previewHolder.addCallback(surfaceCallback); previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); addView(); The end result would be an image overlay above the SurfaceView. Subject to the quality of the watermark image, the rendered quality of the watermark should seem original. Hope it helps.
App crashes on pressing the back button from a Camera Activity
I'm trying to make a motion detector app that captures images on the motion detection. Its working fine & saving the images. The problem is that the app crashes when I press back button from the camera activity to return to the home activity. How to fix it ? Here is my code: public class MotionDetectionActivity extends SensorsActivity { private static final String TAG = "MotionDetectionActivity"; private static final String ENABLE_MOTION_DETECTION="switch_md"; private static SurfaceView preview = null; private static SurfaceHolder previewHolder = null; private static Camera camera = null; private static boolean inPreview = false; private static long mReferenceTime = 0; private static IMotionDetection detector = null; public static MediaPlayer song; public static Vibrator mVibrator; private static volatile AtomicBoolean processing = new AtomicBoolean(false); public int MY_PERMISSIONS_REQUEST_CAMERA; /** * {#inheritDoc} */ #Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); setContentView(R.layout.main); SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(this); boolean enablemotionpref = sharedPref.getBoolean(ENABLE_MOTION_DETECTION, true); song = MediaPlayer.create(this, R.raw.sound); mVibrator = (Vibrator)this.getSystemService(VIBRATOR_SERVICE); preview = (SurfaceView) findViewById(R.id.preview); previewHolder = preview.getHolder(); previewHolder.addCallback(surfaceCallback); previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); if (enablemotionpref) { if (Preferences.USE_RGB) { detector = new RgbMotionDetection(); } else if (Preferences.USE_LUMA) { detector = new LumaMotionDetection(); } else { // Using State based (aggregate map) detector = new AggregateLumaMotionDetection(); } } } /** * {#inheritDoc} */ #Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); 
} /** * {#inheritDoc} */ #Override public void onPause() { super.onPause(); if(song!=null && song.isPlaying()) { song.stop();} camera.setPreviewCallback(null); if (inPreview) camera.stopPreview(); inPreview = false; camera.release(); camera = null; } /** * {#inheritDoc} */ #Override public void onResume() { super.onResume(); camera = Camera.open(); } private PreviewCallback previewCallback = new PreviewCallback() { /** * {#inheritDoc} */ #Override public void onPreviewFrame(byte[] data, Camera cam) { if (data == null) return; Camera.Size size = cam.getParameters().getPreviewSize(); if (size == null) return; if (!GlobalData.isPhoneInMotion()) { DetectionThread thread = new DetectionThread(data, size.width, size.height); thread.start(); } } }; private SurfaceHolder.Callback surfaceCallback = new SurfaceHolder.Callback() { /** * {#inheritDoc} */ #Override public void surfaceCreated(SurfaceHolder holder) { try { camera.setPreviewDisplay(previewHolder); camera.setPreviewCallback(previewCallback); } catch (Throwable t) { Log.e("Callback", "Exception in setPreviewDisplay()", t); } } /** * {#inheritDoc} */ #Override public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { Camera.Parameters parameters = camera.getParameters(); Camera.Size size = getBestPreviewSize(width, height, parameters); if (size != null) { parameters.setPreviewSize(size.width, size.height); Log.d(TAG, "Using width=" + size.width + " height=" + size.height); } camera.setParameters(parameters); camera.startPreview(); inPreview = true; } /** * {#inheritDoc} */ #Override public void surfaceDestroyed(SurfaceHolder holder) { // Ignore } }; private static Camera.Size getBestPreviewSize(int width, int height, Camera.Parameters parameters) { Camera.Size result = null; for (Camera.Size size : parameters.getSupportedPreviewSizes()) { if (size.width <= width && size.height <= height) { if (result == null) { result = size; } else { int resultArea = result.width * result.height; int 
newArea = size.width * size.height; if (newArea > resultArea) result = size; } } } return result; } private static final class DetectionThread extends Thread { private byte[] data; private int width; private int height; public DetectionThread(byte[] data, int width, int height) { this.data = data; this.width = width; this.height = height; } /** * {#inheritDoc} */ #Override public void run() { if (!processing.compareAndSet(false, true)) return; // Log.d(TAG, "BEGIN PROCESSING..."); try { // Previous frame int[] pre = null; if (Preferences.SAVE_PREVIOUS) pre = detector.getPrevious(); // Current frame (with changes) // long bConversion = System.currentTimeMillis(); int[] img = null; if (Preferences.USE_RGB) { img = ImageProcessing.decodeYUV420SPtoRGB(data, width, height); if (img != null && detector.detect(img, width, height)) { if(song!=null && !song.isPlaying()) { song.start(); mVibrator.vibrate(50); } } else { if(song!=null && song.isPlaying()) { song.pause(); } } } // Current frame (without changes) int[] org = null; if (Preferences.SAVE_ORIGINAL && img != null) org = img.clone(); if (img != null && detector.detect(img, width, height)) { // The delay is necessary to avoid taking a picture while in // the // middle of taking another. This problem can causes some // phones // to reboot. 
long now = System.currentTimeMillis(); if (now > (mReferenceTime + Preferences.PICTURE_DELAY)) { mReferenceTime = now; Bitmap previous = null; if (Preferences.SAVE_PREVIOUS && pre != null) { if (Preferences.USE_RGB) previous = ImageProcessing.rgbToBitmap(pre, width, height); else previous = ImageProcessing.lumaToGreyscale(pre, width, height); } Bitmap original = null; if (Preferences.SAVE_ORIGINAL && org != null) { if (Preferences.USE_RGB) original = ImageProcessing.rgbToBitmap(org, width, height); else original = ImageProcessing.lumaToGreyscale(org, width, height); } Bitmap bitmap = null; if (Preferences.SAVE_CHANGES) { if (Preferences.USE_RGB) bitmap = ImageProcessing.rgbToBitmap(img, width, height); else bitmap = ImageProcessing.lumaToGreyscale(img, width, height); } Log.i(TAG, "Saving.. previous=" + previous + " original=" + original + " bitmap=" + bitmap); Looper.prepare(); new SavePhotoTask().execute(previous, original, bitmap); } else { Log.i(TAG, "Not taking picture because not enough time has passed since the creation of the Surface"); } } } catch (Exception e) { e.printStackTrace(); } finally { processing.set(false); } // Log.d(TAG, "END PROCESSING..."); processing.set(false); } }; private static final class SavePhotoTask extends AsyncTask<Bitmap, Integer, Integer> { /** * {#inheritDoc} */ #Override protected Integer doInBackground(Bitmap... 
data) { for (int i = 0; i < data.length; i++) { Bitmap bitmap = data[i]; String name = "MotDet_"+String.valueOf(System.currentTimeMillis()); if (bitmap != null) createDirectoryAndSaveFile(name, bitmap); } return 1; } private void createDirectoryAndSaveFile(String name, Bitmap bitmap) { File folder = new File(Environment.getExternalStorageDirectory() + File.separator + "MD");//here you have created different name boolean success = true; if (!folder.exists()) { success = folder.mkdirs(); } if (success) { // Do something on success } else { // Do something else on failure } File photo = new File(folder.getAbsolutePath(), name+ ".jpg"); //use path of above created folder if (photo.exists()) { photo.delete(); } try { FileOutputStream out = new FileOutputStream(photo.getPath()); bitmap.compress(Bitmap.CompressFormat.JPEG, 100, out); out.flush(); out.close(); } catch (Exception e) { e.printStackTrace(); } } } } HomeActivity public class HomeActivity extends AppCompatActivity { #Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); setContentView(R.layout.home_layout); Button bt1 = (Button) findViewById(R.id.button); Button bt2= (Button)findViewById(R.id.button1); bt1.setOnClickListener(new View.OnClickListener() { #Override public void onClick(View view) { Intent i = new Intent(view.getContext(), MotionDetectionActivity.class); startActivity(i); } }); bt2.setOnClickListener(new View.OnClickListener() { #Override public void onClick(View vew) { Intent b=new Intent(vew.getContext(),SettingsActivity.class); startActivity(b); } }); } } Stacktrace
Handle it like this and see if it still crashes: @Override public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { if(camera != null) { Camera.Parameters parameters = camera.getParameters(); Camera.Size size = getBestPreviewSize(width, height, parameters); if (size != null) { parameters.setPreviewSize(size.width, size.height); Log.d(TAG, "Using width=" + size.width + " height=" + size.height); } camera.setParameters(parameters); camera.startPreview(); inPreview = true; } }
ImageView still visible after setVisibility(View.INVISIBLE)
I am trying to make a camera app that shows an overlay when a picture is taken. When this overlay is shown the other UI components (except for the FrameLayout that shows the picture) should go invisible. But it seems that while my 2 imagebuttons go invisble, my imageview(ivCompass) doesn't. Here is the code that gets called when a picture is taken Camera.PictureCallback mPicture = new Camera.PictureCallback() { #Override public void onPictureTaken(byte[] data, Camera camera) { //create a new intent... String path = createFile(data); intent = new Intent(); intent.putExtra("path", path); mBearingProvider.updateBearing(); bearing = mBearingProvider.getBearing(); cardinalDirection = bearingToString(bearing); //((TextView) findViewById(R.id.tvPicDirection)).setText(cardinalDirection); Log.e("Direction", cardinalDirection + "," + bearing); findViewById(R.id.btnFlash).setVisibility(View.INVISIBLE); findViewById(R.id.btnCapture).setVisibility(View.INVISIBLE); findViewById(R.id.ivCompass).setVisibility(View.INVISIBLE); findViewById(R.id.pictureOverlay).setVisibility(View.VISIBLE); } }; And here is the layout file <?xml version="1.0" encoding="utf-8"?> <RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android" android:layout_width="match_parent" android:layout_height="match_parent"> <FrameLayout android:id="#+id/camera_preview" android:layout_width="match_parent" android:layout_height="match_parent"> </FrameLayout> <ImageButton android:id="#+id/btnFlash" android:layout_width="50dp" android:layout_height="50dp" android:layout_centerVertical="true" android:layout_margin="10dp" android:src="#drawable/camera_flash_on" android:background="#drawable/circle_flash" android:onClick="changeFlashMode"/> <ImageButton android:id="#+id/btnCapture" android:layout_width="50dp" android:layout_height="50dp" android:layout_alignParentRight="true" android:layout_centerVertical="true" android:layout_margin="10dp" android:src="#drawable/icon_camera" 
android:background="#drawable/circle_camera"/> <ImageView android:id="#+id/ivCompass" android:layout_width="100dp" android:layout_height="100dp" android:layout_alignParentRight="true" android:src="#drawable/camera_compass" android:background="#android:color/transparent"/> <RelativeLayout android:id="#+id/pictureOverlay" android:layout_width="match_parent" android:layout_height="match_parent" android:layout_margin="20dp" android:background="#color/alphaBlack" android:visibility="invisible"> </RelativeLayout> I think It's just a mistake with naming, syntax or something like that, but I can't seem to find it. EDIT: Here is the entire Activity public class CameraActivity extends AppCompatActivity implements BearingToNorthProvider.ChangeEventListener { private Camera mCamera; private CameraView mCameraView; private float mDist = 0f; private String flashMode; private ImageButton flashButton; private Intent intent; private BearingToNorthProvider mBearingProvider; private double bearing; private double currentBearing = 0d; private String cardinalDirection = "?"; private final int REQUEST_CODE_ASK_PERMISSIONS = 2; #Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_camera); mCamera = getCameraInstance(); mCameraView = new CameraView(this, mCamera); FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview); preview.addView(mCameraView); ImageButton captureButton = (ImageButton) findViewById(R.id.btnCapture); captureButton.setOnClickListener(new View.OnClickListener() { #Override public void onClick(View v) { Camera.Parameters params = mCamera.getParameters(); params.setFlashMode(flashMode); mCamera.setParameters(params); mCamera.takePicture(null, null, mPicture); } }); SharedPreferences sharedPref = this.getSharedPreferences(getString(R.string.apiKey), Context.MODE_PRIVATE); flashMode = sharedPref.getString(getString(R.string.flashMode), Camera.Parameters.FLASH_MODE_OFF); flashButton 
= (ImageButton) findViewById(R.id.btnFlash); setFlashButton(); mBearingProvider = new BearingToNorthProvider(this,this); mBearingProvider.setChangeEventListener(this); mBearingProvider.start(); } #Override protected void onPause() { super.onPause(); mBearingProvider.stop(); } /** * Helper method to access the camera returns null if it cannot get the * camera or does not exist * * #return the instance of the camera */ private Camera getCameraInstance() { Camera camera = null; try { camera = Camera.open(); } catch (Exception e) { Log.e("CamException", e.toString()); } return camera; } Camera.PictureCallback mPicture = new Camera.PictureCallback() { #Override public void onPictureTaken(byte[] data, Camera camera) { //create a new intent... String path = createFile(data); intent = new Intent(); intent.putExtra("path", path); mBearingProvider.updateBearing(); bearing = mBearingProvider.getBearing(); cardinalDirection = bearingToString(bearing); //((TextView) findViewById(R.id.tvPicDirection)).setText(cardinalDirection); Log.e("Direction", cardinalDirection + "," + bearing); findViewById(R.id.btnFlash).setVisibility(View.INVISIBLE); findViewById(R.id.btnCapture).setVisibility(View.INVISIBLE); findViewById(R.id.ivCompass).setVisibility(View.INVISIBLE); findViewById(R.id.pictureOverlay).setVisibility(View.VISIBLE); } }; private void confirmPicture(View v) { /*String direction = String.valueOf(((TextView) findViewById(R.id.tvPicDirection)).getText()); String description = String.valueOf(((EditText) findViewById(R.id.tvPicDescription)).getText()); intent.putExtra("direction", direction); intent.putExtra("description", description);*/ //close this Activity... 
setResult(Activity.RESULT_OK, intent); finish(); } //region File Methods /** * Method that creates a file from the given byte array and saves the file in the Pictures Directory * #param data is the array of bytes that represent the picture taken by the camera * #return the path of created file */ private String createFile(byte[] data){ checkFilePermissions(); File picFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES) + File.separator + "tempPic.jpg" + File.separator); String path = picFile.getPath(); try { BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(picFile)); bos.write(data); bos.flush(); bos.close(); return path; } catch (IOException e) { e.printStackTrace(); return ""; } } /** * Checks the permission for reading to and writing from the external storage */ private void checkFilePermissions() { if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.M) { int hasWriteExternalStoragePermission = checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE); if (hasWriteExternalStoragePermission != PackageManager.PERMISSION_GRANTED) { requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_CODE_ASK_PERMISSIONS); return; } } } //endregion //region Zoom Methods #Override public boolean onTouchEvent(MotionEvent event) { // Get the pointer ID Camera.Parameters params = mCamera.getParameters(); int action = event.getAction(); if (event.getPointerCount() > 1) { // handle multi-touch events if (action == MotionEvent.ACTION_POINTER_DOWN) { mDist = getFingerSpacing(event); } else if (action == MotionEvent.ACTION_MOVE && params.isZoomSupported()) { mCamera.cancelAutoFocus(); handleZoom(event, params); } } else { // handle single touch events if (action == MotionEvent.ACTION_UP) { handleFocus(event, params); } } return true; } private void handleZoom(MotionEvent event, Camera.Parameters params) { int maxZoom = params.getMaxZoom(); int zoom = params.getZoom(); 
float newDist = getFingerSpacing(event); if (newDist > mDist) { //zoom in if (zoom < maxZoom) zoom++; } else if (newDist < mDist) { //zoom out if (zoom > 0) zoom--; } mDist = newDist; params.setZoom(zoom); mCamera.setParameters(params); } public void handleFocus(MotionEvent event, Camera.Parameters params) { int pointerId = event.getPointerId(0); int pointerIndex = event.findPointerIndex(pointerId); // Get the pointer's current position float x = event.getX(pointerIndex); float y = event.getY(pointerIndex); List<String> supportedFocusModes = params.getSupportedFocusModes(); if (supportedFocusModes != null && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) { mCamera.autoFocus(new Camera.AutoFocusCallback() { #Override public void onAutoFocus(boolean b, Camera camera) { // currently set to auto-focus on single touch } }); } } /** Determine the space between the first two fingers */ private float getFingerSpacing(MotionEvent event) { double x = event.getX(0) - event.getX(1); double y = event.getY(0) - event.getY(1); return (float) Math.sqrt(x * x + y * y); } //endregion //region Flash Methods public void changeFlashMode(View v) { switch (flashMode) { case Camera.Parameters.FLASH_MODE_ON : flashMode = Camera.Parameters.FLASH_MODE_AUTO; break; case Camera.Parameters.FLASH_MODE_AUTO : flashMode = Camera.Parameters.FLASH_MODE_OFF; break; case Camera.Parameters.FLASH_MODE_OFF : flashMode = Camera.Parameters.FLASH_MODE_ON;; break; } SharedPreferences sharedPref = getSharedPreferences(getString(R.string.flashMode), Context.MODE_PRIVATE); SharedPreferences.Editor editor = sharedPref.edit(); editor.putString(getString(R.string.flashMode), flashMode); editor.commit(); setFlashButton(); } public void setFlashButton() { switch (flashMode) { case Camera.Parameters.FLASH_MODE_ON : flashButton.setImageDrawable(getResources().getDrawable(R.drawable.camera_flash_on)); break; case Camera.Parameters.FLASH_MODE_AUTO : 
flashButton.setImageDrawable(getResources().getDrawable(R.drawable.camera_flash_auto)); break; case Camera.Parameters.FLASH_MODE_OFF : flashButton.setImageDrawable(getResources().getDrawable(R.drawable.camera_flash_off)); break; } } //endregion //region Bearing Methods /** * Method that gives a cardinal direction based on the current bearing to the true north * #param bearing is the bearing to the true north * #return cardinal direction that belongs to the bearing */ private String bearingToString(Double bearing) { String strHeading = "?"; if (isBetween(bearing,-180.0,-157.5)) { strHeading = "South"; } else if (isBetween(bearing,-157.5,-112.5)) { strHeading = "SouthWest"; } else if (isBetween(bearing,-112.5,-67.5)) { strHeading = "West"; } else if (isBetween(bearing,-67.5,-22.5)) { strHeading = "NorthWest"; } else if (isBetween(bearing,-22.5,22.5)) { strHeading = "North"; } else if (isBetween(bearing,22.5,67.5)) { strHeading = "NorthEast"; } else if (isBetween(bearing,67.5,112.5)) { strHeading = "East"; } else if (isBetween(bearing,112.5,157.5)) { strHeading = "SouthEast"; } else if (isBetween(bearing,157.5,180.0)) { strHeading = "South"; } return strHeading; } /** * Method that checks if a certain number is in a certain range of numbers * #param x is the number to check * #param lower is the number that defines the lower boundary of the number range * #param upper is the number that defines the upper boundary of the number range * #return true if the number is between the other numbers, false otherwise */ private boolean isBetween(double x, double lower, double upper) { return lower <= x && x <= upper; } /* Method that triggers when the bearing changes, it sets the current bearing and sends an updated context to the provider */ #Override public void onBearingChanged(double bearing) { this.bearing = bearing; mBearingProvider.setContext(this); ImageView image = (ImageView) findViewById(R.id.ivCompass); // create a rotation animation (reverse turn degree degrees) if 
(bearing < 0) { bearing += 360; } RotateAnimation ra = new RotateAnimation((float)currentBearing,(float)-bearing, Animation.RELATIVE_TO_SELF, 0.5f,Animation.RELATIVE_TO_SELF,0.5f); // how long the animation will take place ra.setDuration(210); // set the animation after the end of the reservation status ra.setFillAfter(true); // Start the animation image.startAnimation(ra); currentBearing = -bearing; mBearingProvider.setContext(this); } //endregion } EDIT 2: I have made a small change to the onBearingChanged Method and now the compass is still visible, but thinks it's invisible and isn't moving because of my new if statement #Override public void onBearingChanged(double bearing) { this.bearing = bearing; mBearingProvider.setContext(this); ImageView image = (ImageView) findViewById(R.id.ivCompass); if (image.getVisibility() == View.VISIBLE) { // create a rotation animation (reverse turn degree degrees) if (bearing < 0) { bearing += 360; } RotateAnimation ra = new RotateAnimation((float) currentBearing, (float) -bearing, Animation.RELATIVE_TO_SELF, 0.5f, Animation.RELATIVE_TO_SELF, 0.5f); // how long the animation will take place ra.setDuration(210); // set the animation after the end of the reservation status ra.setFillAfter(true); // Start the animation image.startAnimation(ra); currentBearing = -bearing; } mBearingProvider.setContext(this); }
If you have some kind of animation running, the animation probably interferes with the call that makes the view invisible (a RotateAnimation with setFillAfter(true) keeps transforming the view). Try clearing the animation just before setting it to INVISIBLE: findViewById(R.id.ivCompass).clearAnimation();