Can anybody help me with this?
I have a project that uses CameraBridgeViewBase, and I want to take a picture and save it to a file, but CameraBridgeViewBase does not provide a takePicture method.
Result: I wrote a new class that extends JavaCameraView and implemented takePicture myself.
import org.opencv.android.JavaCameraView;
import java.io.FileOutputStream;
import java.util.List;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.Size;
import android.util.AttributeSet;
import android.util.Log;
public class MyCameraView extends JavaCameraView implements PictureCallback {
private static final String TAG = "myCameraView";
private String mPictureFileName;
public MyCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public List<String> getEffectList() {
return mCamera.getParameters().getSupportedColorEffects();
}
public boolean isEffectSupported() {
return (mCamera.getParameters().getColorEffect() != null);
}
public String getEffect() {
return mCamera.getParameters().getColorEffect();
}
public void setEffect(String effect) {
Camera.Parameters params = mCamera.getParameters();
params.setColorEffect(effect);
mCamera.setParameters(params);
}
public List<Size> getResolutionList() {
return mCamera.getParameters().getSupportedPreviewSizes();
}
public void setResolution(Size resolution) {
disconnectCamera();
mMaxHeight = resolution.height;
mMaxWidth = resolution.width;
connectCamera(getWidth(), getHeight());
}
public Size getResolution() {
return mCamera.getParameters().getPreviewSize();
}
public void takePicture(final String fileName) {
Log.i(TAG, "Taking picture");
this.mPictureFileName = fileName;
// Postview and jpeg are sent in the same buffers if the queue is not empty when performing a capture.
// Clear up buffers to avoid mCamera.takePicture to be stuck because of a memory issue
mCamera.setPreviewCallback(null);
// PictureCallback is implemented by the current class
mCamera.takePicture(null, null, this);
}
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.i(TAG, "Saving a bitmap to file");
// The camera preview was automatically stopped. Start it again.
mCamera.startPreview();
mCamera.setPreviewCallback(this);
// Write the image in a file (in jpeg format)
try {
FileOutputStream fos = new FileOutputStream(mPictureFileName);
fos.write(data);
fos.close();
} catch (java.io.IOException e) {
Log.e("PictureDemo", "Exception in photoCallback", e);
}
}
}
And this is how I use it in my fragment:
private MyCameraView mOpenCvCameraView;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
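// "view" here is the fragment's inflated layout that is returned below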
mOpenCvCameraView = (MyCameraView) view.findViewById(R.id.fd_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
return view;
}
@Override
public void onResume() {
super.onResume();
mOpenCvCameraView.enableView();
String filename = "teste.jpg";
mOpenCvCameraView.takePicture(filename);
mOpenCvCameraView.disableView();
}
I want to periodically capture a photo from the camera inside a service and send it through a socket to a server application (desktop software). I tried to do this with the code below, but only one capture is taken and it does not keep capturing.
Based on this answer, I must call startPreview() before capturing with takePicture(), and I am already doing that, but it still does not work.
Could someone help me, please?
package com.example.vrs.myexampleapp;
import android.Manifest;
import android.app.Activity;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.support.v4.app.ActivityCompat;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.WindowManager;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.List;
import static android.os.Environment.getExternalStorageDirectory;
@SuppressWarnings("deprecation")
public class MyCamera extends Service {
public final int DONE = 1;
public final int NEXT = 2;
public final int PERIOD = 1000;
private Camera camera;
private Timer timer;
private int cameraId = 0;
SurfaceHolder previewHolder;
public class Timer extends AsyncTask<Void, Void, Void> {
Context mContext;
private Handler threadHandler;
public Timer(Context context, Handler threadHandler) {
super();
this.threadHandler = threadHandler;
mContext = context;
}
@Override
protected Void doInBackground(Void... params) {
try {
Thread.sleep(PERIOD);
Message.obtain(threadHandler, DONE, "").sendToTarget();
} catch (InterruptedException e) {
e.printStackTrace();
}
return null;
}
}
private int findFrontFacingCamera() {
int cameraId = -1;
int numberOfCameras = Camera.getNumberOfCameras();
for (int i = 0; i < numberOfCameras; i++) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
Log.i("MyCamera", "Camera found");
cameraId = i;
break;
}
}
return cameraId;
}
public void startCamera() {
if (ActivityCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.CAMERA)
== PackageManager.PERMISSION_GRANTED && getPackageManager()
.hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
cameraId = findFrontFacingCamera();
if (cameraId < 0) {
Log.i("MyCamera", "No front facing camera found.");
} else {
safeCameraOpen(cameraId);
}
if (android.os.Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
SurfaceView dummy = new SurfaceView(this);
previewHolder = dummy.getHolder();
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
try {
camera.setPreviewDisplay(previewHolder);
} catch (IOException e1) {
e1.printStackTrace();
}
} else {
SurfaceTexture surfaceTexture = new SurfaceTexture(MODE_PRIVATE);
try {
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
}
camera.startPreview();
if (android.os.Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
try {
camera.setPreviewDisplay(previewHolder);
} catch (IOException e1) {
e1.printStackTrace();
}
}
Camera.Parameters params = camera.getParameters();
params.setJpegQuality(100);
camera.setParameters(params);
timer = new Timer(getApplicationContext(), threadHandler);
timer.execute();
}
}
public static DataOutputStream dos;
public static byte[] array;
private Handler threadHandler = new Handler() {
public void handleMessage(android.os.Message msg) {
switch (msg.what) {
case DONE:
camera.startPreview();
camera.takePicture(null, null, mCall);
break;
case NEXT:
Log.i("MyCamera", "Here in NEXT!!");
timer = new Timer(getApplicationContext(), threadHandler);
timer.execute();
break;
}
}
};
Camera.PictureCallback mCall = new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Log.i("MyCamera", "Here in PictureCallback");
if (data != null) {
Matrix mtx = new Matrix();
mtx.postRotate(270);
Bitmap bitmapPicture = BitmapFactory.decodeByteArray(data, 0, data.length);
bitmapPicture = Bitmap.createScaledBitmap(bitmapPicture, 360, 360, true);
Bitmap rotatedBMP = Bitmap.createBitmap(bitmapPicture, 0, 0, bitmapPicture.getWidth(), bitmapPicture.getHeight(), mtx, true);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
try {
rotatedBMP.compress(Bitmap.CompressFormat.JPEG, 100, bos);
array = Methods.compress(bos.toByteArray());
new ConnAsyncTask().execute();
Message.obtain(threadHandler, NEXT, "").sendToTarget(); // Capture a new photo
} catch (Exception e) {
e.printStackTrace();
}
}
}
};
static class ConnAsyncTask extends AsyncTask<Void, Void, Void> {
protected Void doInBackground(Void... params) {
try {
dos = new DataOutputStream(SocketBackgroundService.yclientSocket.getOutputStream());
dos.writeInt(array.length);
dos.write(array, 0, array.length);
dos.flush();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
}
private boolean safeCameraOpen(int id) {
boolean qOpened = false;
try {
stopCamera();
camera = Camera.open(id);
qOpened = (camera != null);
} catch (Exception e) {
Log.i("MyCamera", "failed to open Camera");
e.printStackTrace();
}
return qOpened;
}
public void stopCamera() {
if (camera != null) {
camera.stopPreview();
camera.release();
camera = null;
}
}
public static MyCamera instance;
@Override
public void onCreate() {
super.onCreate();
Log.i("MyCamera", "Service created!!!");
instance = this;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Log.i("MyCamera", "onStartCommand() service started!!!");
instance = this;
return START_STICKY;
}
@Override
public void onDestroy() {
super.onDestroy();
stopCamera();
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
}
I'm not entirely sure why you're using an AsyncTask (your Timer class) for a delay.
Replace that with a simple handler.postDelayed().
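For illustration, a minimal sketch of that approach, reusing the camera, mCall, and PERIOD names from the service above; everything else is an assumption, not a drop-in replacement:
private final Handler captureHandler = new Handler();
private final Runnable captureRunnable = new Runnable() {
    @Override
    public void run() {
        if (camera != null) {
            // Same work the DONE case did: restart the preview, then capture.
            camera.startPreview();
            camera.takePicture(null, null, mCall);
        }
    }
};
// Start the loop where timer.execute() was called:
//     captureHandler.postDelayed(captureRunnable, PERIOD);
// Schedule the next shot at the end of onPictureTaken() instead of sending NEXT:
//     captureHandler.postDelayed(captureRunnable, PERIOD);
// Stop the loop in onDestroy():
//     captureHandler.removeCallbacks(captureRunnable);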
I am creating an app where I can use the camera inside it without going to the phone's default camera app. So I have used a SurfaceView and tried to save the picture onto the phone, but onPictureTaken never gets called. The complete code is shown below:
package com.example.surfaceviewcamera;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.hardware.Camera;
import android.os.Bundle;
import android.app.Activity;
import android.content.Context;
import android.content.ContextWrapper;
import android.view.Menu;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.Toast;
public class MainActivity extends Activity implements SurfaceHolder.Callback {
SurfaceView mSurfaceView;
SurfaceHolder mSurfaceHolder;
Camera mCamera;
boolean mPreviewRunning;
Button btncapture;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
btncapture=(Button) findViewById(R.id.btncapture);
mSurfaceView = (SurfaceView) findViewById(R.id.surface_camera);
mSurfaceHolder = mSurfaceView.getHolder();
mSurfaceHolder.addCallback(this);
mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
btncapture.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
public void onPictureTaken(byte[] imageData, Camera c) {
Bitmap bitmap = BitmapFactory.decodeByteArray(imageData , 0, imageData .length);
String file_path=saveToInternalSorage(bitmap);
Toast.makeText(getApplicationContext(),"Image stored succesfully at "+file_path,Toast.LENGTH_LONG).show();
}
};
}
});
}
private String saveToInternalSorage(Bitmap bitmapImage){
ContextWrapper cw = new ContextWrapper(getApplicationContext());
// path to /data/data/yourapp/app_data/imageDir
File directory = cw.getDir("imageDir", Context.MODE_PRIVATE);
// Create imageDir
File mypath=new File(directory,"marina1.jpg");
FileOutputStream fos = null;
try {
fos = new FileOutputStream(mypath);
// Use the compress method on the BitMap object to write image to the OutputStream
bitmapImage.compress(Bitmap.CompressFormat.PNG, 100, fos);
fos.close();
} catch (Exception e) {
e.printStackTrace();
}
return directory.getAbsolutePath();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
mCamera=Camera.open();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int w,
int h) {
if (mPreviewRunning) {
mCamera.stopPreview();
}
Camera.Parameters p = mCamera.getParameters();
p.setPreviewSize(w, h);
mCamera.setParameters(p);
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
mPreviewRunning = true;
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mCamera.stopPreview();
mPreviewRunning = false;
mCamera.release();
}
}
My XML file:
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical">
<SurfaceView
android:id="#+id/surface_camera"
android:layout_width="fill_parent"
android:layout_height="10dip"
android:layout_weight="1">
</SurfaceView>
<Button
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="CAPTURE IMAGE"
android:id="#+id/btncapture"
android:layout_gravity="center"
/>
</LinearLayout>
Call camera.takePicture(null, null, callback); from onClick() and define the callback outside onCreate().
For reference, look into this link.
public class MainActivity extends Activity implements SurfaceHolder.Callback {
SurfaceView mSurfaceView;
SurfaceHolder mSurfaceHolder;
Camera mCamera;
boolean mPreviewRunning;
Button btncapture;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
btncapture=(Button) findViewById(R.id.btncapture);
mSurfaceView = (SurfaceView) findViewById(R.id.surface_camera);
mSurfaceHolder = mSurfaceView.getHolder();
mSurfaceHolder.addCallback(this);
mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
btncapture.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
//take picture here
mCamera.takePicture(null, null, mPictureCallback);
}
});
}
Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
public void onPictureTaken(byte[] imageData, Camera c) {
Bitmap bitmap = BitmapFactory.decodeByteArray(imageData , 0, imageData .length);
String file_path=saveToInternalSorage(bitmap);
Toast.makeText(getApplicationContext(),"Image stored succesfully at "+file_path,Toast.LENGTH_LONG).show();
}
};
private String saveToInternalSorage(Bitmap bitmapImage){
ContextWrapper cw = new ContextWrapper(getApplicationContext());
// path to /data/data/yourapp/app_data/imageDir
File directory = cw.getDir("imageDir", Context.MODE_PRIVATE);
// Create imageDir
File mypath=new File(directory,"marina1.jpg");
FileOutputStream fos = null;
try {
fos = new FileOutputStream(mypath);
// Use the compress method on the BitMap object to write image to the OutputStream
bitmapImage.compress(Bitmap.CompressFormat.PNG, 100, fos);
fos.close();
} catch (Exception e) {
e.printStackTrace();
}
return directory.getAbsolutePath();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
mCamera=Camera.open();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int w,
int h) {
if (mPreviewRunning) {
mCamera.stopPreview();
}
Camera.Parameters p = mCamera.getParameters();
p.setPreviewSize(w, h);
mCamera.setParameters(p);
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
mPreviewRunning = true;
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mCamera.stopPreview();
mPreviewRunning = false;
mCamera.release();
}
}
The following code takes a picture with the camera and works fine.
When I pause the application (by going back to the home screen) and then return to the app, the camera image no longer gets saved.
Even though I release and re-open the camera in onPause()/onResume(), I get the error "Method called after release()" when calling snapPicture().
This is my code, and the line 'Log.w("Error message: ", e.getLocalizedMessage());' displays the error.
Can anyone help me to solve this problem?
CameraPreview.java
package com.example.testproject;
import java.io.IOException;
import android.content.Context;
import android.hardware.Camera;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class CameraPreview extends SurfaceView implements
SurfaceHolder.Callback {
private SurfaceHolder mSurfaceHolder;
private Camera mCamera;
// Constructor that obtains context and camera
@SuppressWarnings("deprecation")
public CameraPreview(Context context, Camera camera) {
super(context);
this.mCamera = camera;
this.mSurfaceHolder = this.getHolder();
this.mSurfaceHolder.addCallback(this);
this.mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
try {
mCamera.setPreviewDisplay(surfaceHolder);
mCamera.startPreview();
} catch (IOException e) {
// left blank for now
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int format,
int width, int height) {
// start preview with new settings
try {
mCamera.setPreviewDisplay(surfaceHolder);
mCamera.startPreview();
} catch (Exception e) {
// intentionally left blank for a test
}
}
}
Custom_CameraActivity.java
package com.example.testproject;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.sql.Date;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
public class Custom_CameraActivity extends Activity {
private Camera mCamera;
private CameraPreview mCameraPreview;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
public void setCameraAndCameraPreview(Camera mcamera,
CameraPreview mCameraPreview) {
this.mCamera = mcamera;
this.mCameraPreview = mCameraPreview;
}
public String snapPicture() {
TakePictureTaskAsync takePictureTask = new TakePictureTaskAsync();
takePictureTask.setCamera(mCamera);
Calendar cal = Calendar.getInstance();
String useThisTimeStamp = new SimpleDateFormat("HHmmss").format(cal
.getTime());
takePictureTask.configureTimestamp(useThisTimeStamp);
takePictureTask.execute();
return useThisTimeStamp;
}
/**
* Helper method to access the camera; returns null if it cannot get the
* camera or the camera does not exist.
*
* @return
*/
}
class TakePictureTaskAsync extends AsyncTask<Void, Void, Void> {
private Camera mCamera;
private String myTimeStamp;
public void setCamera(Camera mCamera) {
this.mCamera = mCamera;
}
public void configureTimestamp(String timeStamp) {
this.myTimeStamp = timeStamp;
}
@Override
protected void onPostExecute(Void result) {
}
@Override
protected Void doInBackground(Void... params) {
try {
mCamera.takePicture(null, null, mPicture);
} catch (Exception e) {
Log.w("Error message: ", e.getLocalizedMessage());
//this is where the error "method called after release" comes in
}
return null;
}
PictureCallback mPicture = new PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFile = getOutputMediaFile();
if (pictureFile == null) {
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(resizeImage(data));
fos.close();
} catch (FileNotFoundException e) {
} catch (IOException e) {
}
try {
HTTPPost postPictureToURL = new HTTPPost();
postPictureToURL.setTimestamp(myTimeStamp);
postPictureToURL.executeImagePOST();
} catch (Exception e) {
e.printStackTrace();
}
}
};
private File getOutputMediaFile() {
File mediaStorageDir = new File(
Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
"testdir");
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
return null;
}
}
File mediaFile;
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "cameraView_" + myTimeStamp + ".jpg");
return mediaFile;
}
byte[] resizeImage(byte[] input) {
Bitmap original = BitmapFactory.decodeByteArray(input, 0, input.length);
Bitmap resized = Bitmap.createScaledBitmap(original, 800, 600, true);
ByteArrayOutputStream blob = new ByteArrayOutputStream();
resized.compress(Bitmap.CompressFormat.JPEG, 70, blob);
return blob.toByteArray();
}
}
MainActivity.java
@Override
protected void onPause() {
super.onPause();
try {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCameraPreview.getHolder().removeCallback(mCameraPreview);
mCamera.release();
mCamera = null;
} catch (Exception e) {
e.printStackTrace();
}
}
protected void onResume() {
super.onResume();
if (mCamera != null) {
mCamera.startPreview();
} else {
mCamera = getCameraInstance();
mCamera.startPreview();
}
}
private Camera getCameraInstance() {
Camera camera = null;
try {
camera = Camera.open();
} catch (Exception e) {
//do nothing
}
return camera;
}
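For context, here is a minimal sketch of the kind of onResume() this setup usually needs: re-acquire the camera, re-attach the preview surface, and hand the fresh Camera object back to whatever later calls takePicture(). The field and method names are taken from the code above; the wiring itself is an assumption, not a verified fix:
@Override
protected void onResume() {
    super.onResume();
    if (mCamera == null) {
        mCamera = getCameraInstance();
    }
    // The surface callback was removed in onPause(), so register it again
    mCameraPreview.getHolder().addCallback(mCameraPreview);
    try {
        mCamera.setPreviewDisplay(mCameraPreview.getHolder());
    } catch (IOException e) {
        e.printStackTrace();
    }
    mCamera.startPreview();
    // Give the object that runs snapPicture() the new Camera instance,
    // otherwise it keeps calling takePicture() on the one released in onPause()
    // (hypothetical reference; adapt to however Custom_CameraActivity is reached):
    // customCameraActivity.setCameraAndCameraPreview(mCamera, mCameraPreview);
}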
I am trying to draw a background image in my Android app. However, when I try to load it as a bitmap, it says the file is not found. Can you use images from custom folders created in your project directory?
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MainGamePanel extends SurfaceView implements SurfaceHolder.Callback {
Bitmap BackgroundImage;
private static final String TAG = MainGamePanel.class.getSimpleName();
private MainThread thread;
public MainGamePanel(Context context) {
super(context);
// adding the callback (this) to the surface holder to intercept events
getHolder().addCallback(this);
// creating game thread
thread = new MainThread(getHolder(), this);
// make the GamePanel focusable so it can handle events
setFocusable(true);
}
@Override
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
// TODO Auto-generated method stub
}
@Override
public void surfaceCreated(SurfaceHolder arg0) {
thread.setRunning(true);
thread.start();
Background b1 = new Background();
BackgroundImage = b1.loadBackgroundImage();
}
@Override
public void surfaceDestroyed(SurfaceHolder arg0) {
Log.d(TAG, "Surface is being destroyed");
boolean retry = true;
// tell the drawing thread to stop before joining it
thread.setRunning(false);
while(retry){
try{
thread.join();
retry = false;
} catch (InterruptedException e) {
// try again to shutdown thread
}
}
Log.d(TAG, "Thread was shut down cleanly");
}
@Override
public boolean onTouchEvent(MotionEvent event){
return super.onTouchEvent(event);
}
@Override
protected void onDraw(Canvas canvas){
canvas.drawBitmap(BackgroundImage, 0, 0, null);
}
}
------Background Class------
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
public class Background {
private String imageLoc;
public Background(){
}
public String getImageLoc() {
return imageLoc;
}
public void setImageLoc(String imageLoc) {
this.imageLoc = imageLoc;
}
public Bitmap loadBackgroundImage(){
Bitmap background = BitmapFactory.decodeFile("/Users/Justin/Documents/Project_WinterGalaxy/AndroidGalaxy/images/Background.png");
return background;
}
}
The way to show an image in your code is to put it in the relevant drawable folder (resource file names must be lowercase, e.g. background.png) and then load it like this:
Bitmap background = BitmapFactory.decodeResource(getResources(), R.drawable.background);
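Applied to the MainGamePanel above, a minimal sketch, assuming the image is saved as res/drawable/background.png (the Background helper class is then no longer needed):
@Override
public void surfaceCreated(SurfaceHolder arg0) {
    // decodeResource reads from the app's packaged resources instead of an absolute file path
    BackgroundImage = BitmapFactory.decodeResource(getResources(), R.drawable.background);
    thread.setRunning(true);
    thread.start();
}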
My code behaves differently when the application runs on Android 2.2 versus Android 3.0. The code is built for Android 2.2 and produces no build errors in Eclipse, but on Android 2.2 the OnCompletionListener does not seem to work as expected. Am I doing something wrong, or is there a difference between Android 2.2 and 3.0 regarding OnCompletionListener?
My sample code is based on http://developer.android.com/resources/samples/ApiDemos/src/com/example/android/apis/media/MediaPlayerDemo_Video.html
import android.app.Activity;
import android.content.Context;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnBufferingUpdateListener;
import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.media.MediaPlayer.OnVideoSizeChangedListener;
import android.os.Bundle;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;
import android.view.WindowManager;
public class MediaPlayerActivity extends Activity implements OnBufferingUpdateListener, OnCompletionListener, OnPreparedListener, OnVideoSizeChangedListener, SurfaceHolder.Callback {
private final String TAG = "MediaPlayer";
private SurfaceView m_preview;
private SurfaceHolder m_holder;
private Bundle m_extras;
private String m_path;
private MediaPlayer m_mediaPlayer;
private int m_videoWidht;
private int m_videoHeight;
private boolean m_isVideoReadyToPlay;
private boolean m_isVideoSizeKnown;
private static final int LOCAL_VIDEO = 1;
private static final int STREAM_VIDEO = 2;
private static final String MEDIA = "media";
private static final String FILE = "/mnt/sdcard/file1.avi";
private WakeLock m_keepscreen;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.i(TAG, "MediaPlayer::onCreate");
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.videoplayer);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
m_keepscreen = pm.newWakeLock( PowerManager.FULL_WAKE_LOCK | PowerManager.ON_AFTER_RELEASE | PowerManager.ACQUIRE_CAUSES_WAKEUP,"MediaPlayerActivity");
m_preview = (SurfaceView) findViewById(R.id.surface);
m_holder = m_preview.getHolder();
m_holder.addCallback(this);
m_holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
m_extras = getIntent().getExtras();
m_keepscreen.acquire();
}
private void playVideo(Integer media){
doCleanUp();
try{
switch (media){
case LOCAL_VIDEO :
m_path = FILE;
Log.i(TAG, "File: "+FILE);
if(m_path == ""){
//
}
break;
case STREAM_VIDEO:
Log.i(TAG, "PlayVideo, cant stream video yet");
break;
}
m_mediaPlayer = new MediaPlayer();
m_mediaPlayer.setDataSource(m_path);
m_mediaPlayer.setDisplay(m_holder);
m_mediaPlayer.prepare();
m_mediaPlayer.setOnBufferingUpdateListener(this);
m_mediaPlayer.setOnPreparedListener(this);
m_mediaPlayer.setOnCompletionListener(this);
/*
m_mediaPlayer.setOnCompletionListener(new OnCompletionListener(){
public void onCompletion(MediaPlayer mp) {
Log.i(TAG, "MediaPlayer::onCompletion");
startVideoPlayback();
}
});
*/
m_mediaPlayer.setOnVideoSizeChangedListener(this);
m_mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
}catch (Exception e){
Log.i(TAG, "Error: "+e.getMessage());
}
}
private void doCleanUp() {
m_videoWidht = 0;
m_videoHeight = 0;
m_isVideoReadyToPlay = false;
m_isVideoSizeKnown = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.i(TAG, "MediaPlayer::surfaceChanged");
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "MediaPlayer::surfaceCreated");
playVideo(1);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "MediaPlayer::surfaceDestroyed");
}
@Override
public void onVideoSizeChanged(MediaPlayer mp, int width, int height) {
Log.i(TAG, "MediaPlayer::videoSizeChanged");
if( width == 0 || height == 0 ){
Log.i(TAG, "invalid video with("+width+")or height ("+height+")");
}
m_isVideoSizeKnown = true;
m_videoWidht = width;
m_videoHeight = height;
if(m_isVideoReadyToPlay && m_isVideoSizeKnown){
startVideoPlayback();
}
}
private void startVideoPlayback() {
Log.i(TAG, "StartVideoPlayback");
m_holder.setFixedSize(m_videoWidht, m_videoHeight);
m_mediaPlayer.start();
}
@Override
public void onPrepared(MediaPlayer mp) {
Log.i(TAG, "MediaPlayer::onPrepared");
m_isVideoReadyToPlay = true;
if(m_isVideoReadyToPlay && m_isVideoSizeKnown){
startVideoPlayback();
}
}
@Override
public void onCompletion(MediaPlayer mp) {
//android 2.2 never get here
Log.i(TAG, "MediaPlayer::onCompletion");
startVideoPlayback();
}
@Override
public void onBufferingUpdate(MediaPlayer mp, int percent) {
Log.i(TAG, "MediaPlayer::onBufferingUpdate: "+percent+" %");
}
@Override
protected void onPause() {
super.onPause();
releaseMediaPlayer();
doCleanUp();
}
@Override
protected void onDestroy() {
super.onDestroy();
releaseMediaPlayer();
doCleanUp();
}
private void releaseMediaPlayer() {
if (m_mediaPlayer != null) {
m_mediaPlayer.release();
m_mediaPlayer = null;
m_keepscreen.release();
}
}
}