I'm trying to cancel my AsyncTask when I press the Back button, using this code:
@Override
public void onBackPressed() {
super.onBackPressed();
if (mediaPlayer.isPlaying()) {
mediaPlayer.stop();
}
if(playerTask!=null && !playerTask.isCancelled()) playerTask.cancel(true);
this.finish();
}
But it doesn't work; playerTask keeps running:
PlayerTask playerTask = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_pop_for_ringtone);
mediaPlayer = new MediaPlayer();
playerTask = new PlayerTask();
playerTask.execute(url);
/***/
@SuppressLint("StaticFieldLeak")
public class PlayerTask extends AsyncTask<String, Void, Boolean> {
ProgressBar pb = findViewById(R.id.progress);
@Override
protected void onPreExecute() {
super.onPreExecute();
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
AudioAttributes attribs = new AudioAttributes.Builder().setUsage(AudioAttributes.USAGE_MEDIA).setContentType(AudioAttributes.CONTENT_TYPE_MUSIC).build();
mediaPlayer.setAudioAttributes(attribs);
} else {
mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
}
}
@Override
protected Boolean doInBackground(String... strings) {
if(!isCancelled()) {
try {
mediaPlayer.setDataSource(strings[0]);
mediaPlayer.prepare();
prepared = true;
} catch (IOException e) {
e.printStackTrace();
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
}
mediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mediaPlayer) {
mediaPlayer.start();
}
});
}
else
{
playerTask.cancel(true);
}
return prepared;
}
@Override
protected void onPostExecute(Boolean aBoolean) {
super.onPostExecute(aBoolean);
music.setEnabled(true);
music.setVisibility(View.VISIBLE);
music.setChecked(true);
}
}
Try this
@Override
public void onBackPressed() {
// if playerTask == null, mediaPlayer was never started, so there is nothing to handle
if(playerTask != null && playerTask.getStatus() == AsyncTask.Status.FINISHED) {
if (mediaPlayer.isPlaying()) {
mediaPlayer.stop();
}
} else if (playerTask != null && playerTask.getStatus() != AsyncTask.Status.FINISHED) {
// This means your task is still running; stop your mediaPlayer inside the task
playerTask.cancel(true);
}
super.onBackPressed();
}
And in your PlayerTask, override the onCancelled method:
@Override
protected void onCancelled(Boolean aBoolean) {
if(isCancelled() && mediaPlayer.isPlaying()) {
mediaPlayer.stop();
}
}
Hope it helps.
You have two problems to fix.
1. When you call super.onBackPressed(); before you cancel the playerTask, your activity is destroyed first. You should call it only after the playerTask has been cancelled.
2. When checking whether the playerTask is running, you also need to test playerTask.getStatus() == AsyncTask.Status.RUNNING.
Change
@Override
public void onBackPressed() {
super.onBackPressed();
if (mediaPlayer.isPlaying()) {
mediaPlayer.stop();
}
if(playerTask!=null && !playerTask.isCancelled()) playerTask.cancel(true);
this.finish();
}
to
@Override
public void onBackPressed() {
if (mediaPlayer.isPlaying()) {
mediaPlayer.stop();
}
if(playerTask!=null && !playerTask.isCancelled() && playerTask.getStatus() == AsyncTask.Status.RUNNING){
playerTask.cancel(true);
playerTask = null;
}
super.onBackPressed();
this.finish();
}
Referring to the docs for AsyncTask.cancel(boolean), this method only makes isCancelled() return true and causes onCancelled() to run instead of onPostExecute(). So if you need to cancel the AsyncTask properly, you have to periodically check isCancelled() in doInBackground() and stop the work yourself. Your code would look something like this:
@Override
protected Boolean doInBackground(String... strings) {
if(!isCancelled()){
//TODO: whatever you need to do in the AsyncTask
}
else{
//TODO: the task was cancelled, so clean up and stop
}
return prepared; // return your result, e.g. the prepared flag from the question
}
}
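As a concrete illustration, here is a minimal sketch of that pattern applied to the player task from the question (it assumes the same mediaPlayer and prepared fields and only shows the cancellation checks, not the playback start):
@Override
protected Boolean doInBackground(String... strings) {
    if (isCancelled()) {
        return false; // cancelled before any work was done
    }
    try {
        mediaPlayer.setDataSource(strings[0]);
        mediaPlayer.prepare(); // blocking, but we are off the UI thread here
        prepared = true;
    } catch (IOException | IllegalArgumentException | IllegalStateException e) {
        e.printStackTrace();
    }
    if (isCancelled()) {
        // Cancelled while preparing: release the player instead of starting playback.
        mediaPlayer.release();
        prepared = false;
    }
    return prepared;
}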
MediaPlayer mediaPlayer = new MediaPlayer();
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
AudioAttributes attribs = new AudioAttributes.Builder().setUsage(AudioAttributes.USAGE_MEDIA).setContentType(AudioAttributes.CONTENT_TYPE_MUSIC).build();
mediaPlayer.setAudioAttributes(attribs);
} else {
mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
}
mediaPlayer.setDataSource(url);
mediaPlayer.prepareAsync();
mediaPlayer.setOnPreparedListener(new OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mediaPlayer) {
mediaPlayer.start();
music.setEnabled(true);
music.setVisibility(View.VISIBLE);
music.setChecked(true);
}
});
And then in onBackPressed() you can call mediaPlayer.release().
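For example, a minimal sketch of that onBackPressed(), assuming the mediaPlayer created above is kept in a field:
@Override
public void onBackPressed() {
    if (mediaPlayer != null) {
        if (mediaPlayer.isPlaying()) {
            mediaPlayer.stop();
        }
        mediaPlayer.release(); // free the native player; create a new one before playing again
        mediaPlayer = null;
    }
    super.onBackPressed();
}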
I have created a waveform while the media plays with the following code, and I stop the player when the Back button or the Stop button is clicked.
Put this in your XML layout file:
<VisualizerView
android:id="@+id/waveform_view"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@android:color/white"/>
MainActivity.java
private MediaPlayer myMediaPlayer;
private VisualizerView mWaveformView;
Visualizer audioOutput = null;
onCreate()
{
mWaveformView = (VisualizerView) findViewById(R.id.waveform_view);
myButtonPlayLastRecordAudio.setOnClickListener(this);
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.buttonPlay:
myAudioSavePathInDevice = Environment.getExternalStorageDirectory().toString() + "/" +
"AudioRecording.3gp";
myMediaPlayer = new MediaPlayer();
myMediaPlayer.setVolume(1.5f, 1.5f);
if (myAudioSavePathInDevice == null || myAudioSavePathInDevice.isEmpty()) {
Toast.makeText(MainActivity.this, "No Audio Found", Toast.LENGTH_LONG).show();
} else {
try {
FileInputStream is = new FileInputStream(myAudioSavePathInDevice);
myMediaPlayer.setDataSource(is.getFD());
myMediaPlayer.prepare();
myMediaPlayer.start();
Toast.makeText(MainActivity.this, "Recording Playing", Toast.LENGTH_LONG).show();
mWaveformView.clear();
createVisualizer();
} catch (IOException e) {
e.printStackTrace();
}
myMediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
audioOutput.setEnabled(false);
}
});
}
break;
case R.id.buttonStopAudio:
mediaPlayerReleaseResources();
break;
}
}
private void createVisualizer() {
int rate = Visualizer.getMaxCaptureRate();
audioOutput = new Visualizer(myMediaPlayer.getAudioSessionId()); // get output audio stream
audioOutput.setDataCaptureListener(new Visualizer.OnDataCaptureListener() {
@Override
public void onWaveFormDataCapture(Visualizer visualizer, byte[] waveform, int samplingRate) {
int max = 0, min = 255;
for (int i = 0; i < waveform.length; i++) {
int w = (int) waveform[i] & 0xFF;
max = Math.max(w, max);
min = Math.min(w, min);
}
mWaveformView.addAmplitude((max - min)); // update the VisualizeView
mWaveformView.invalidate(); // refresh the VisualizerView
}
@Override
public void onFftDataCapture(Visualizer visualizer, byte[] fft, int samplingRate) {
}
}, rate, true, false); // waveform not freq data
audioOutput.setEnabled(true);
}
private void mediaPlayerReleaseResources() {
if(myMediaPlayer != null) {
if (myMediaPlayer.isPlaying()) {
myMediaPlayer.stop();
}
}
if( audioOutput != null)
audioOutput.setEnabled(false);
}
@Override
public void onBackPressed() {
super.onBackPressed();
mediaPlayerReleaseResources();
}
VisualizerView.class
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.view.View;
import java.util.ArrayList;
import java.util.List;
public class VisualizerView extends View {
private static final int LINE_WIDTH = 3; // width of visualizer lines
private static final int LINE_SCALE = 4; // scales visualizer lines
private List<Float> amplitudes; // amplitudes for line lengths
private int width; // width of this View
private int height; // height of this View
private Paint linePaint; // specifies line drawing characteristics
private float temp_scale = 3;
float heighest = temp_scale;
// constructor
public VisualizerView(Context context, AttributeSet attrs) {
super(context, attrs); // call superclass constructor
linePaint = new Paint(); // create Paint for lines
linePaint.setColor(Color.BLACK); // set color to black
linePaint.setStrokeWidth(LINE_WIDTH); // set stroke width
}
// called when the dimensions of the View change
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
width = w; // new width of this View
height = h; // new height of this View
amplitudes = new ArrayList<Float>(width / LINE_WIDTH);
}
// clear all amplitudes to prepare for a new visualization
public void clear() {
heighest = 3;
amplitudes.clear();
}
// add the given amplitude to the amplitudes ArrayList
public void addAmplitude(float amplitude) {
amplitudes.add(amplitude); // add newest to the amplitudes ArrayList
// if the power lines completely fill the VisualizerView
if (amplitudes.size() * LINE_WIDTH >= width) {
amplitudes.remove(0); // remove oldest power value
}
}
// draw the visualizer with scaled lines representing the amplitudes
@Override
public void onDraw(Canvas canvas) {
int middle = height / 2; // get the middle of the View
float curX = 0; // start curX at zero
// for each item in the amplitudes ArrayList
for (float power : amplitudes) {
heighest = Math.max(power, heighest);
temp_scale = heighest / height;
float scaledHeight = power / temp_scale; // LINE_SCALE; // scale the power
curX += (LINE_WIDTH); // increase X by LINE_WIDTH
// draw a line representing this item in the amplitudes ArrayList
canvas.drawLine(curX, middle + scaledHeight / 2, curX, middle
- scaledHeight / 2, linePaint);
}
}
}
I'm creating an app in Android Studio where I want 10 clips to be played at the same time, side by side. I'm already having problems with lag at three clips, and I wonder if I'm better off using threads. If so, how?
Any hint would be very much appreciated.
Here is my code so far. I know it is not very efficient and that I would be better off using an array of player objects, for example, but I'm just testing for now:
public class MainActivity extends AppCompatActivity implements TextureView.SurfaceTextureListener {
private MediaPlayer mp1, mp2, mp3;
private TextureView tv1, tv2, tv3;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
tv1 = findViewById(R.id.textureView1);
tv2 = findViewById(R.id.textureView2);
tv3 = findViewById(R.id.textureView3);
tv1.setSurfaceTextureListener(this);
tv2.setSurfaceTextureListener(this);
tv3.setSurfaceTextureListener(this);
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
Surface surface = new Surface(surfaceTexture);
mp1 = MediaPlayer.create(this, R.raw.a7);
mp1.setSurface(surface);
// mp1.prepareAsync(); //
mp1.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mp1) {
mp1.start();
}
});
Surface surface2 = new Surface(surfaceTexture);
mp2 = MediaPlayer.create(this, R.raw.a9);
mp2.setSurface(surface2);
// mp1.prepareAsync(); //
mp2.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mp2) {
mp2.start();
}
});
Surface surface3 = new Surface(surfaceTexture);
mp3 = MediaPlayer.create(this, R.raw.a10);
mp3.setSurface(surface3);
// mp1.prepareAsync(); //
mp3.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mp3) {
mp3.start();
}
});
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
@Override
protected void onPause() {
if (mp1 != null && mp1.isPlaying()) {
mp1.pause();
}
super.onPause();
}
@Override
protected void onResume() {
if (mp1 != null) {
mp1.start();
}
super.onResume();
}
@Override
protected void onDestroy() {
if (mp1 != null) {
mp1.stop();
mp1.release();
mp1 = null;
}
super.onDestroy();
}
}
You should play media on a different, non-UI thread, like this:
public class MediaService extends Service {
private MediaPlayer mp1, mp2, mp3;
private static final String ACTION_START = TAG + ".ACTION_START";
private IBinder mBinder = new MyBinder();
private MediaPlayer.OnPreparedListener mMediaPrepared = new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mp) {
Log.d(TAG, "MediaPlayer.onPrepared");
onCommandPlay(mp);
}
};
@Override
public IBinder onBind(Intent intent) {
Log.v(TAG, "onBind");
return mBinder;
}
@Override
public void onCreate() {
super.onCreate();
mp1 = MediaPlayer.create(this, R.raw.a1);
mp2 = MediaPlayer.create(this, R.raw.a2);
mp3 = MediaPlayer.create(this, R.raw.a9);
}
@Override
public void onDestroy() {
super.onDestroy();
if (mp1 != null) mp1.release();
if (mp2 != null) mp2.release();
if (mp3 != null) mp3.release();
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
final String action = intent.getAction();
Log.d(TAG, "onStartCommand: " + intent.getAction());
if (ACTION_START.equals(action)) {
onCommandStart();
return START_STICKY;
}
stopSelf();
return Service.START_STICKY_COMPATIBILITY;
}
/**
* Performs actions related to media player when Service onStartCommand method is called
*
*/
private void onCommandStart() {
// Create Notifications with remote views
mNotification = new NotificationCompat.Builder(this).setTicker("Media Service started...")
.setSmallIcon(R.mipmap.ic_launcher)
.setContent(collapsed)
.setAutoCancel(false)
.setOngoing(true)
.build();
startForeground(NOTIFICATION_ID, mNotification);
startPlaying();
}
private void onCommandPlay(MediaPlayer mp) {
try {
mp.start();
} catch (IllegalStateException e) {
Log.e(TAG, "onCommandPlay", e);
}
}
/**
* Start playing the provided media item
*
*/
private void startPlaying() {
try {
mp1.reset();
mp1.setOnPreparedListener(mMediaPrepared);
mp2.reset();
mp2.setOnPreparedListener(mMediaPrepared);
mp3.reset();
mp3.setOnPreparedListener(mMediaPrepared);
AssetFileDescriptor afd1 = getResources().openRawResourceFd(R.raw.a9);
AssetFileDescriptor afd2 = getResources().openRawResourceFd(R.raw.a10);
AssetFileDescriptor afd3 = getResources().openRawResourceFd(R.raw.a8);
mp1.setDataSource(afd1.getFileDescriptor(), afd1.getStartOffset(), afd1.getLength());
mp2.setDataSource(afd2.getFileDescriptor(), afd2.getStartOffset(), afd2.getLength());
mp3.setDataSource(afd3.getFileDescriptor(), afd3.getStartOffset(), afd3.getLength());
mp1.prepareAsync();
mp2.prepareAsync();
mp3.prepareAsync();
} catch (IOException e) {
Log.e(TAG, "startPlaying", e);
}
}
public class MyBinder extends Binder {
public MediaService getService() {
return MediaService.this;
}
}
}
Then start the service from your Activity like this:
Intent intent = new Intent(context, MediaService.class);
intent.setAction(ACTION_START);
startService(intent);
You should also handle the other media-playback use cases. You can refer to this link for more.
Currently, I'm developing a media player and I need the SeekBar to work together with it. At the moment everything is okay, but when I touch a given position on the SeekBar, the MediaPlayer should jump to that position; instead, the song and the SeekBar go back to the start, and I don't know why it's happening.
MainActivity.java
public class MainActivity extends AppCompatActivity {
private Handler mHandler = new Handler();
public TextView song_detail;
public TextView time1;
public TextView time2;
private String player_status = "playing";
private ImageButton player_img;
public static SeekBar seekBar;
private static MediaPlayer mediaPlayer;
@SuppressLint("RestrictedApi")
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
song_detail = findViewById(R.id.song_detail);
song_detail.setVisibility(View.GONE);
time1 = findViewById(R.id.time_1);
time2 = findViewById(R.id.time_2);
seekBar = findViewById(R.id.seekBar);
final Runnable mRunnable = new Runnable() {
@Override
public void run() {
if(mediaPlayer != null){
int CurrentPosition = mediaPlayer.getCurrentPosition();
String m_1;
String s_1;
seekBar.setProgress(CurrentPosition/1000);
final int minutes_1 = (CurrentPosition/1000)/60;
final int seconds_1 = ((CurrentPosition/1000)%60);
if (minutes_1 < 10) {
m_1 = "0" + minutes_1;
} else {
m_1 = "" + minutes_1;
}
if (seconds_1 < 10) {
s_1 = "0" + seconds_1;
} else {
s_1 = "" + seconds_1;
}
time1.setText(m_1 + ":" + s_1);
int Duration = mediaPlayer.getDuration();
String m_2;
String s_2;
final int minutes_2 = (Duration/1000)/60;
final int seconds_2 = ((Duration/1000)%60);
if (minutes_2 < 10) {
m_2 = "0" + minutes_2;
} else {
m_2 = "" + minutes_2;
}
if (seconds_2 < 10) {
s_2 = "0" + seconds_2;
} else {
s_2 = "" + seconds_2;
}
time2.setText(m_2 + ":" + s_2);
}
mHandler.postDelayed(this, 1000);
}
};
mRunnable.run();
seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
if(mediaPlayer != null && fromUser){
Toast.makeText(getApplicationContext(), String.valueOf(progress), Toast.LENGTH_LONG).show();
mediaPlayer.seekTo(progress);
seekBar.setProgress(progress);
}
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) { }
@Override
public void onStopTrackingTouch(SeekBar seekBar) { }
});
}
public void initAudio(final Context context, final String url) {
if (mediaPlayer == null) {
mediaPlayer = MediaPlayer.create(context, Uri.parse(url));
}
mediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
//Toast.makeText(context, "TEST", Toast.LENGTH_LONG).show();
killMediaPlayer();
}
});
seekBar.setMax(mediaPlayer.getDuration()/1000);
mediaPlayer.start();
}
public void killMediaPlayer() {
if (mediaPlayer != null) {
try {
mediaPlayer.reset();
mediaPlayer.release();
mediaPlayer = null;
} catch (Exception e) {
e.printStackTrace();
}
}
}
public static void pauseAudio() {
if (!(mediaPlayer == null)) {
mediaPlayer.pause();
}
}
public static void startAudio() {
if (!(mediaPlayer == null)) {
mediaPlayer.start();
}
}
}
This is the part of the code where I work with the MediaPlayer and the SeekBar:
seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
if(mediaPlayer != null && fromUser){
Toast.makeText(getApplicationContext(), String.valueOf(progress), Toast.LENGTH_LONG).show();
mediaPlayer.seekTo(progress);
seekBar.setProgress(progress);
}
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) { }
@Override
public void onStopTrackingTouch(SeekBar seekBar) { }
});
}
But as I said, instead of the MediaPlayer seeking and continuing the audio from the position I clicked, the audio simply goes back to the start. What am I doing wrong?
Thank you.
By default the max value for progress is 100, while the seekTo() function takes a time in milliseconds. That is why it seeks (almost) to the start of the audio.
So instead of mediaPlayer.seekTo(progress); you need to do:
int newTime = (int) ((progress / 100.0) * total_audio_duration_in_millisecond);
mediaPlayer.seekTo(newTime);
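Alternatively, since initAudio() in the question already calls seekBar.setMax(mediaPlayer.getDuration()/1000), the progress value there is in seconds, so a minimal sketch under that assumption is to convert seconds to milliseconds in the listener:
seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
    @Override
    public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
        if (mediaPlayer != null && fromUser) {
            // setMax() was given duration/1000, so progress is in seconds
            mediaPlayer.seekTo(progress * 1000);
        }
    }

    @Override
    public void onStartTrackingTouch(SeekBar seekBar) { }

    @Override
    public void onStopTrackingTouch(SeekBar seekBar) { }
});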
This is my code; I am almost certain the problem is with the while loop.
public void start(View view) {
int currentPosition = 0;
player = MediaPlayer.create(this, R.raw.sound);
player.start();
int total = player.getDuration();
progressBar.setProgress(0);
progressBar.setMax(player.getDuration());
while (player != null && currentPosition < total) {
// try {
// Thread.sleep(500);
currentPosition = player.getCurrentPosition();
// } catch (InterruptedException e) {
// return;
// } catch (Exception e) {
// return;
// }
progressBar.setProgress(currentPosition);
}
}
public void stop(View view) {
player.stop();
}
Whether or not I have the sleep intervals, my result is the same: the sound starts playing but I can't stop it, and the progress bar doesn't move.
I would appreciate some help.
I used this code in my app to use a SeekBar with a MediaPlayer. The activity implements Runnable so that a background thread keeps the SeekBar updating as the music plays. Take a look at the code:
public class MainActivity extends Activity implements Runnable,
OnClickListener, OnSeekBarChangeListener {
private SeekBar seekBar;
private Button startMedia;
private Button stopMedia;
private MediaPlayer mp;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
seekBar = (SeekBar) findViewById(R.id.seekBar1);
startMedia = (Button) findViewById(R.id.button1);
stopMedia = (Button) findViewById(R.id.button2);
startMedia.setOnClickListener(this);
stopMedia.setOnClickListener(this);
seekBar.setOnSeekBarChangeListener(this);
seekBar.setEnabled(false);
}
public void run() {
int currentPosition = mp.getCurrentPosition();
int total = mp.getDuration();
while (mp != null && currentPosition < total) {
try {
Thread.sleep(1000);
currentPosition = mp.getCurrentPosition();
} catch (InterruptedException e) {
return;
} catch (Exception e) {
return;
}
seekBar.setProgress(currentPosition);
}
}
public void onClick(View v) {
if (v.equals(startMedia)) {
if (mp == null) {
mp = MediaPlayer.create(getApplicationContext(), R.raw.song2);
seekBar.setEnabled(true);
}
if (mp.isPlaying()) {
mp.pause();
startMedia.setText("play");
} else {
mp.start();
startMedia.setText("pause");
seekBar.setMax(mp.getDuration());
new Thread(this).start();
}
}
if (v.equals(stopMedia) && mp != null) {
if (mp.isPlaying() || mp.getDuration() > 0) {
mp.stop();
mp = null;
startMedia.setText("play");
seekBar.setProgress(0);
}
}
}
public void onProgressChanged(SeekBar seekBar, int progress,
boolean fromUser) {
try {
if (mp != null) {
if (fromUser)
mp.seekTo(progress);
} else if (mp == null) {
Toast.makeText(getApplicationContext(), "Media is not running",
Toast.LENGTH_SHORT).show();
seekBar.setProgress(0);
}
} catch (Exception e) {
Log.e("seek bar", "" + e);
seekBar.setEnabled(false);
}
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
// TODO Auto-generated method stub
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
// TODO Auto-generated method stub
}
}
Thanks to: Sridhar Kulkarni
I want to reset the SeekBar to the start position once the music has finished playing, with the button showing "Play". The problem with the code is that the SeekBar stays at the end position after it's done playing the music, and the button still shows "Pause".
Here's my code:
ekdanta.java
public class ekdanta extends AppCompatActivity implements Runnable, View.OnClickListener,SeekBar.OnSeekBarChangeListener {
TextView tv4;
Button b9, b10,but19;
int count = 0;
MediaPlayer play;
SeekBar seek_bar;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_ekdanta);
tv4 = (TextView) findViewById(R.id.textView9);
tv4.setTextSize((float)21.5);
tv4.setText(Html.fromHtml(getString(R.string.thirteen)));
b9 = (Button) findViewById(R.id.b9);
b10 = (Button) findViewById(R.id.b10);
seek_bar = (SeekBar) findViewById(R.id.seekBar);
seek_bar.setOnSeekBarChangeListener(this);
seek_bar.setEnabled(false);
but19 = (Button) findViewById(R.id.button19);
but19.setOnClickListener(this);
}
public void run() {
int currentPosition= play.getCurrentPosition();
int total = play.getDuration();
while (play!=null && currentPosition<total) {
try {
Thread.sleep(1000);
currentPosition= play.getCurrentPosition();
} catch (InterruptedException e) {
return;
} catch (Exception e) {
return;
}
seek_bar.setProgress(currentPosition);
}
}
public void onClick(View v) {
if (v.equals(but19)) {
if (play == null) {
play = MediaPlayer.create(getApplicationContext(), R.raw.ekadanta);
seek_bar.setEnabled(true);
}
if (play.isPlaying()) {
play.pause();
but19.setText("Play");
} else {
play.start();
but19.setText("Pause");
seek_bar.setMax(play.getDuration());
new Thread(this).start();
}
}
}
@Override
protected void onPause() {
if(play!=null){
play.stop();
}
super.onPause();
}
public void increase(View inc) {
count++;
if (count == 1) {
tv4.setTextSize(25);
} else if (count == 2) {
tv4.setTextSize(30);
} else if (count >= 3) {
count = 3;
tv4.setTextSize(40);
}
}
public void decrease(View dec) {
count--;
if (count <= 0) {
tv4.setTextSize((float)21.5);
count = 0;
}
if (count == 1) {
tv4.setTextSize(25);
} else if (count == 2) {
tv4.setTextSize(30);
} else if (count == 3) {
tv4.setTextSize(40);
}
}
@Override
public void onProgressChanged(SeekBar seek_bar, int progress, boolean fromUser) {
try{
if(play.isPlaying()||play!=null){
if (fromUser)
play.seekTo(progress);
}
else if(play==null){
Toast.makeText(getApplicationContext(),"First Play", Toast.LENGTH_SHORT).show();
seek_bar.setProgress(0);
}
}
catch(Exception e){
Log.e("seek bar",""+e);
seek_bar.setEnabled(false);
}
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
}
}
MediaPlayer has an OnCompletionListener; you can use that. Your code that polls with a thread is not very reliable; it would be better to refactor it.
mMediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
// do whatever you want
}
});
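For the reset asked about above, a minimal sketch (assuming the play, seek_bar, and but19 fields from ekdanta.java) restores the UI inside onCompletion:
play.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
    @Override
    public void onCompletion(MediaPlayer mp) {
        seek_bar.setProgress(0); // send the seek bar back to the start
        but19.setText("Play");   // flip the button label back
        play.seekTo(0);          // rewind so the next press starts from the beginning
    }
});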
I am trying to develop an application in Android with the following feature: record video and audio seamlessly, even if the user has another application in the foreground. A common scenario for it would be: the user opens the app, starts recording, then opens a navigation app or receives a call. I want my app to keep recording.
I have put together some code, mainly inspired by this tutorial, which will be quoted below. I have encountered two problems, however:
1. When I press the "home" key, video recording freezes, but sound is fine
2. When I navigate back to the app, the preview is black
My questions are:
Is my goal possible on Android?
What am I doing wrong?
My code:
public class GlobalState extends Application {
private boolean recording = false;
private boolean loggingEnabled = true;
private Camera serviceCamera = null;
private CameraPreview cameraPreview = null;
@Override
public void onCreate() {
try {
serviceCamera = Camera.open();
} catch (Exception e) {
}
super.onCreate();
}
public boolean isRecording() {
return recording;
}
public boolean isLoggingEnabled() {
return loggingEnabled;
}
public void setRecording(boolean recording) {
this.recording = recording;
}
public void setCamera(Camera serviceCamera) {
this.serviceCamera = serviceCamera;
}
public Camera getCamera() {
return serviceCamera;
}
public void setCameraPreview(CameraPreview cameraPreview) {
this.cameraPreview = cameraPreview;
}
public CameraPreview getCameraPreview() {
return this.cameraPreview;
}
}
public class CameraPreview extends SurfaceView implements
SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera;
private static final String TAG = "CameraPreview";
public CameraPreview(Context context, Camera camera) {
super(context);
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
try {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
} catch (IOException e) {
Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if (mHolder.getSurface() == null) {
return;
}
try {
mCamera.stopPreview();
} catch (Exception e) {
}
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e) {
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
}
public class MainActivity extends Activity {
public String TAG = "DE-MainActivity";
ImageView mRecordView;
ImageView mMenuButtonView;
LinearLayout mMenuView;
TextView mVideosTextView;
TextView mSettingsTextView;
private Camera mCamera;
private CameraPreview mPreview;
GlobalState mAppState = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mAppState = (GlobalState) getApplicationContext();
if (mAppState.isLoggingEnabled()) {
Log.v(TAG, "Activity: onCreate");
}
mCamera = mAppState.getCamera();
if (mAppState.getCameraPreview() == null) {
mPreview = new CameraPreview(this, mCamera);
mAppState.setCameraPreview(mPreview);
}
FrameLayout preview = (FrameLayout) findViewById(R.id.fl_camera);
preview.addView(mPreview);
mMenuView = (LinearLayout) findViewById(R.id.ll_menu_list);
mVideosTextView = (TextView) findViewById(R.id.tv_menu_item_videos);
mSettingsTextView = (TextView) findViewById(R.id.tv_menu_item_settings);
mRecordView = (ImageView) findViewById(R.id.iv_record);
mRecordView.setImageResource(R.drawable.btn_not_recording);
mRecordView.setAlpha((float) 0.5);
mRecordView.bringToFront();
mRecordView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (!mAppState.isRecording()) {
mRecordView.setImageResource(R.drawable.btn_recording);
mRecordView.setAlpha((float) 0.3);
startService(new Intent(MainActivity.this,
RecorderService.class));
} else {
mRecordView.setImageResource(R.drawable.btn_not_recording);
mRecordView.setAlpha((float) 0.5);
stopService(new Intent(MainActivity.this,
RecorderService.class));
}
}
});
mMenuButtonView = (ImageView) findViewById(R.id.iv_menu);
mMenuButtonView.setImageResource(R.drawable.btn_menu);
mMenuButtonView.setAlpha((float) 0.5);
mMenuButtonView.bringToFront();
mMenuButtonView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mMenuView.getVisibility() == View.VISIBLE) {
mMenuView.setVisibility(View.INVISIBLE);
} else {
mMenuView.setVisibility(View.VISIBLE);
}
}
});
mSettingsTextView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mAppState.isLoggingEnabled())
Log.v(TAG, "settings clicked!");
}
});
mVideosTextView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mAppState.isLoggingEnabled())
Log.v(TAG, "videos clicked!");
}
});
}
@Override
protected void onDestroy() {
if (mAppState.isLoggingEnabled())
Log.v(TAG, "APPLICATION EXIT!");
if (mCamera != null) {
mCamera.release(); // release the camera for other applications
mCamera = null;
}
super.onDestroy();
}
public boolean onCreateOptionsMenu(Menu menu) {
if (mMenuView.getVisibility() == View.VISIBLE) {
mMenuView.setVisibility(View.INVISIBLE);
} else {
mMenuView.setVisibility(View.VISIBLE);
}
return false;
}
}
public class RecorderService extends Service {
private static final String TAG = "RecorderService";
private static Camera mServiceCamera;
private MediaRecorder mMediaRecorder;
private GlobalState mAppState;
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
@Override
public void onCreate() {
mAppState = (GlobalState) getApplicationContext();
mServiceCamera = mAppState.getCamera();
if (mAppState.isLoggingEnabled())
Log.v(TAG, "onCreate");
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
super.onStartCommand(intent, flags, startId);
if (!mAppState.isRecording()) {
if (prepareVideoRecorder()) {
mMediaRecorder.start();
mAppState.setRecording(true);
} else {
releaseMediaRecorder();
}
}
return 5;
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public void onDestroy() {
if (mAppState.isLoggingEnabled())
Log.v(TAG, "onDestroy");
// stop recording and release camera
mMediaRecorder.stop(); // stop the recording
releaseMediaRecorder(); // release the MediaRecorder object
mServiceCamera.lock(); // take camera access back from MediaRecorder
mAppState.setRecording(false);
super.onDestroy();
}
private void releaseMediaRecorder() {
if (mMediaRecorder != null) {
mMediaRecorder.reset(); // clear recorder configuration
mMediaRecorder.release(); // release the recorder object
mMediaRecorder = null;
}
}
/** Create a file Uri for saving an image or video */
private static Uri getOutputMediaFileUri(int type) {
return Uri.fromFile(getOutputMediaFile(type));
}
/** Create a File for saving an image or video */
private static File getOutputMediaFile(int type) {
File mediaStorageDir = new File(
Environment.getExternalStorageDirectory(), "DashEyeApp");
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d("MyCameraApp", "failed to create directory");
return null;
}
}
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss")
.format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "IMG_" + timeStamp + ".jpg");
} else if (type == MEDIA_TYPE_VIDEO) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "VID_" + timeStamp + ".mp4");
} else {
return null;
}
return mediaFile;
}
private boolean prepareVideoRecorder() {
mMediaRecorder = new MediaRecorder();
mServiceCamera.unlock();
mMediaRecorder.setCamera(mServiceCamera);
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mMediaRecorder.setProfile(CamcorderProfile
.get(CamcorderProfile.QUALITY_HIGH));
mMediaRecorder.setOutputFile(getOutputMediaFile(MEDIA_TYPE_VIDEO)
.toString());
// Step 5: Set the preview output
// mMediaRecorder.setPreviewDisplay(mAppState.getCameraPreview().getHolder().getSurface());
try {
mMediaRecorder.prepare();
} catch (IllegalStateException e) {
Log.d(TAG,
"IllegalStateException preparing MediaRecorder: "
+ e.getMessage());
releaseMediaRecorder();
return false;
} catch (IOException e) {
Log.d(TAG, "IOException preparing MediaRecorder: " + e.getMessage());
releaseMediaRecorder();
return false;
}
return true;
}
}
I am sorry for the wall of text and I greatly appreciate any help!
My solution works fine; try it.
Android Service:
import java.util.Calendar;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.view.Gravity;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.WindowManager;
import android.view.WindowManager.LayoutParams;
public class BackgroundVideoRecorder extends Service implements
SurfaceHolder.Callback {
private WindowManager windowManager;
private SurfaceView surfaceView;
private Camera camera = null;
private MediaRecorder mediaRecorder = null;
int contTime = 0, duracaoGravacao = 30; // interval in seconds to record video
private String VIDEO_RECORDER_FOLDER; // output directory, assigned in onCreate()
private String nomeArquivo; // output file name, assigned in surfaceCreated()
private class thread implements Runnable {
public void run() {
contTime++;
if (contTime >= duracaoGravacao) {
StopService();
}
tick_Handler.postDelayed(tick_thread, 1000);
}
}
Handler tick_Handler;
thread tick_thread;
Preferences pref;
@Override
public void onCreate() {
windowManager = (WindowManager) this
.getSystemService(Context.WINDOW_SERVICE);
surfaceView = new SurfaceView(this);
LayoutParams layoutParams = new WindowManager.LayoutParams(1, 1,
WindowManager.LayoutParams.TYPE_SYSTEM_OVERLAY,
WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH,
PixelFormat.TRANSLUCENT);
layoutParams.gravity = Gravity.LEFT | Gravity.TOP;
windowManager.addView(surfaceView, layoutParams);
surfaceView.getHolder().addCallback(this);
tick_Handler = new Handler();
tick_thread = new thread();
VIDEO_RECORDER_FOLDER = new _Path().getPathVideo();
}
@Override
public void onStart(Intent intent, int startId) {
tick_Handler.post(tick_thread);
}
// Method called right after Surface created (initializing and starting
// MediaRecorder)
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
boolean found = false;
int i = 0;
try {
for (i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.CameraInfo newInfo = new Camera.CameraInfo();
Camera.getCameraInfo(i, newInfo);
if (newInfo.facing == CameraInfo.CAMERA_FACING_FRONT) {
found = true;
break;
}
}
} catch (Exception e) {
e.printStackTrace();
}
if (found) {
camera = Camera.open(i);
} else {
camera = Camera.open();
}
Calendar lCDateTime = Calendar.getInstance();
String t = String.valueOf(lCDateTime.getTimeInMillis());
nomeArquivo = "hire_me_now_" + t + ".mp4";
nomeArquivo = nomeArquivo.replace(" ", "_").replace(":", "_")
.replace("-", "_");
String caminhoArquivo = VIDEO_RECORDER_FOLDER + "/" + nomeArquivo;
mediaRecorder = new MediaRecorder();
camera.unlock();
mediaRecorder.setPreviewDisplay(surfaceHolder.getSurface());
mediaRecorder.setCamera(camera);
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mediaRecorder.setProfile(CamcorderProfile
.get(CamcorderProfile.QUALITY_QVGA));
mediaRecorder.setVideoFrameRate(15);
mediaRecorder.setOutputFile(caminhoArquivo);
try {
mediaRecorder.prepare();
} catch (Exception e) {
e.printStackTrace();
}
mediaRecorder.start();
}
// Stop recording and remove SurfaceView
@Override
public void onDestroy() {
mediaRecorder.stop();
mediaRecorder.reset();
mediaRecorder.release();
camera.lock();
camera.release();
windowManager.removeView(surfaceView);
}
protected void StopService() {
try {
this.stopSelf();
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int format,
int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
}
It turns out that Android doesn't like the preview being destroyed (when, for example, the user hits the Home button), so it cuts off video recording.
The workaround is to use WindowManager to add an overlay and, when the user hits the Home button, resize it to 1x1. I found the solution here. Many thanks to cman!
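A minimal sketch of that resize step, assuming the preview SurfaceView was added through WindowManager (as in the service above) and that windowManager and surfaceView are hypothetical fields holding those references:
@Override
protected void onPause() {
    super.onPause();
    // Shrink the overlay preview to 1x1 so recording keeps running while the Activity is in the background.
    WindowManager.LayoutParams params =
            (WindowManager.LayoutParams) surfaceView.getLayoutParams();
    params.width = 1;
    params.height = 1;
    windowManager.updateViewLayout(surfaceView, params);
}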
It can be done, but from API level 23 onwards you will need to ask for the camera permission at runtime; you can refer to this answer: https://stackoverflow.com/a/49919386/4604234
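For reference, a minimal sketch of that runtime request using the compat helpers (the request code 200 is arbitrary, and RecorderService is the service from the question):
// In the Activity, before starting the recording service.
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED
        || ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
    ActivityCompat.requestPermissions(this,
            new String[]{Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO}, 200);
} else {
    startService(new Intent(this, RecorderService.class));
}

@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode == 200 && grantResults.length > 0
            && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
        startService(new Intent(this, RecorderService.class));
    }
}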