The following code starts a video player and plays a video.
The problem is that the NavigationBar hides part of the video; I would like to make it transparent.
import android.app.Activity;
import android.content.res.Configuration;
import android.graphics.Color;
import android.graphics.Point;
import android.media.MediaPlayer;
import android.widget.MediaController;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.view.MotionEvent;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.view.Display;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.VideoView;
public class SimpleVideoStream extends Activity implements
MediaPlayer.OnCompletionListener, MediaPlayer.OnPreparedListener,
MediaPlayer.OnErrorListener, MediaPlayer.OnBufferingUpdateListener {
private String TAG = getClass().getSimpleName();
private VideoView mVideoView = null;
private MediaPlayer mMediaPlayer = null;
private MediaController mMediaController = null;
private ProgressBar mProgressBar = null;
private String mVideoUrl;
private Boolean mShouldAutoClose = true;
private boolean mControls;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
this.requestWindowFeature(Window.FEATURE_NO_TITLE);
this.getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
Bundle b = getIntent().getExtras();
mVideoUrl = b.getString("mediaUrl");
mShouldAutoClose = b.getBoolean("shouldAutoClose", true);
mControls = b.getBoolean("controls", true);
RelativeLayout relLayout = new RelativeLayout(this);
relLayout.setBackgroundColor(Color.BLACK);
RelativeLayout.LayoutParams relLayoutParam = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT);
relLayoutParam.addRule(RelativeLayout.CENTER_IN_PARENT, RelativeLayout.TRUE);
mVideoView = new VideoView(this);
mVideoView.setLayoutParams(relLayoutParam);
relLayout.addView(mVideoView);
// Create progress throbber
mProgressBar = new ProgressBar(this);
mProgressBar.setIndeterminate(true);
// Center the progress bar
RelativeLayout.LayoutParams pblp = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.WRAP_CONTENT, RelativeLayout.LayoutParams.WRAP_CONTENT);
pblp.addRule(RelativeLayout.CENTER_IN_PARENT, RelativeLayout.TRUE);
mProgressBar.setLayoutParams(pblp);
// Add progress throbber to view
relLayout.addView(mProgressBar);
mProgressBar.bringToFront();
setOrientation(b.getString("orientation"));
setContentView(relLayout, relLayoutParam);
play();
}
private void play() {
mProgressBar.setVisibility(View.VISIBLE);
Uri videoUri = Uri.parse(mVideoUrl);
try {
mVideoView.setOnCompletionListener(this);
mVideoView.setOnPreparedListener(this);
mVideoView.setOnErrorListener(this);
mVideoView.setVideoURI(videoUri);
mMediaController = new MediaController(this);
mMediaController.setAnchorView(mVideoView);
mMediaController.setMediaPlayer(mVideoView);
if (!mControls) {
mMediaController.setVisibility(View.GONE);
}
mVideoView.setMediaController(mMediaController);
} catch (Throwable t) {
Log.d(TAG, t.toString());
}
}
private void setOrientation(String orientation) {
if ("landscape".equals(orientation)) {
this.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
}else if("portrait".equals(orientation)) {
this.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
}
}
private Runnable checkIfPlaying = new Runnable() {
@Override
public void run() {
if (mVideoView.getCurrentPosition() > 0) {
// Video is not at the very beginning anymore.
// Hide the progress bar.
mProgressBar.setVisibility(View.GONE);
} else {
// Video is still at the very beginning.
// Check again after a small amount of time.
mVideoView.postDelayed(checkIfPlaying, 100);
}
}
};
@Override
public void onPrepared(MediaPlayer mp) {
Log.d(TAG, "Stream is prepared");
mMediaPlayer = mp;
mMediaPlayer.setOnBufferingUpdateListener(this);
mVideoView.requestFocus();
mVideoView.start();
mVideoView.postDelayed(checkIfPlaying, 0);
}
private void pause() {
Log.d(TAG, "Pausing video.");
mVideoView.pause();
}
private void stop() {
Log.d(TAG, "Stopping video.");
mVideoView.stopPlayback();
}
@Override
public void onDestroy() {
super.onDestroy();
Log.d(TAG, "onDestroy triggered.");
stop();
}
private void wrapItUp(int resultCode, String message) {
Log.d(TAG, "wrapItUp was triggered.");
Intent intent = new Intent();
intent.putExtra("message", message);
setResult(resultCode, intent);
finish();
}
public void onCompletion(MediaPlayer mp) {
Log.d(TAG, "onCompletion triggered.");
stop();
if (mShouldAutoClose) {
wrapItUp(RESULT_OK, null);
}
}
public boolean onError(MediaPlayer mp, int what, int extra) {
StringBuilder sb = new StringBuilder();
sb.append("MediaPlayer Error: ");
switch (what) {
case MediaPlayer.MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK:
sb.append("Not Valid for Progressive Playback");
break;
case MediaPlayer.MEDIA_ERROR_SERVER_DIED:
sb.append("Server Died");
break;
case MediaPlayer.MEDIA_ERROR_UNKNOWN:
sb.append("Unknown");
break;
default:
sb.append(" Non standard (");
sb.append(what);
sb.append(")");
}
sb.append(" (" + what + ") ");
sb.append(extra);
Log.e(TAG, sb.toString());
wrapItUp(RESULT_CANCELED, sb.toString());
return true;
}
public void onBufferingUpdate(MediaPlayer mp, int percent) {
Log.d(TAG, "onBufferingUpdate : " + percent + "%");
}
@Override
public void onBackPressed() {
// If we're leaving, let's finish the activity
wrapItUp(RESULT_OK, null);
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
// The screen size changed or the orientation changed... don't restart the activity
super.onConfigurationChanged(newConfig);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
if (mMediaController != null)
mMediaController.show();
return false;
}
}
I tried adding the following, but it does not work well: the ProgressBar overlaps the NavigationBar when the device is in portrait mode.
@Override
public boolean onTouchEvent(MotionEvent event) {
if (mMediaController != null){
mMediaController.show();
this.getWindow().getDecorView().setSystemUiVisibility(
View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION // hide nav bar
| View.SYSTEM_UI_FLAG_FULLSCREEN // hide status bar
| View.SYSTEM_UI_FLAG_IMMERSIVE
);
}
return false;
}
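For the original problem (navigation bar covering the video) and the new one (progress bar overlapping the navigation bar in portrait), here is a minimal sketch of one common approach: keep the layout flags, make the bar translucent instead of hidden, and re-apply the flags once in onWindowFocusChanged() rather than on every touch. The calls below are standard framework APIs; where to place them is an assumption, not part of the original code.
@Override
public void onWindowFocusChanged(boolean hasFocus) {
    super.onWindowFocusChanged(hasFocus);
    if (hasFocus) {
        // Translucent (see-through) navigation bar, API 19+.
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_NAVIGATION);
        // Let the video lay out behind the bars while keeping the layout stable.
        getWindow().getDecorView().setSystemUiVisibility(
                View.SYSTEM_UI_FLAG_LAYOUT_STABLE
                | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
                | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN);
    }
}
If the ProgressBar should never sit under the bar, one option (again an assumption, not from the original code) is to call relLayout.setFitsSystemWindows(true) in onCreate(); note that this also insets the VideoView, so if the video must stay full-bleed, apply the inset to the ProgressBar alone with setOnApplyWindowInsetsListener instead.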
Related
I am making an audio player app. I created a service for the audio player and the playback notification. I am facing a problem with the previous/next buttons in the player as well as in the playback notification.
The previous/next buttons in the audio playback notification just restart playback and do not change the song/audio file. The other controls work well.
The previous/next buttons in the audio player (using ExoPlayer) change and play the next/previous file, but the player artwork (the image for the audio file) does not change. Also, the play/pause button remains in the paused state, and when pressed it plays a different file.
Here is the code for AudioActivity and AudioService:
AudioActivity.java
package com.example.videoplayer.audio;
import static com.example.videoplayer.RecyclerViewClasses.MediaFilesActivity.FOLDER_NAME_KEY;
import static com.example.videoplayer.RecyclerViewClasses.MediaFilesActivity.MY_PREF;
import android.annotation.SuppressLint;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.Bundle;
import android.os.IBinder;
import android.util.Log;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.widget.ImageView;
import android.widget.PopupMenu;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import com.example.videoplayer.MediaFiles;
import com.example.videoplayer.MediaPlayer.PlaylistDialog;
import com.example.videoplayer.R;
import com.example.videoplayer.RecyclerViewClasses.MediaFilesAdapter;
import com.google.android.exoplayer2.PlaybackException;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.TracksInfo;
import com.google.android.exoplayer2.ui.PlayerView;
import com.google.android.exoplayer2.util.Util;
import java.util.ArrayList;
public class AudioActivity extends AppCompatActivity implements View.OnClickListener {
private static final String TAG = AudioActivity.class.getSimpleName();
private static final int NOTIFICATION_ID = 1;
private static final String CHANNEL_ID = "channel_id";
PlayerView playerView;
SimpleExoPlayer player;
//public static SimpleExoPlayer player;
RelativeLayout root;
ImageView audioBack, audioList, menu_more;
ImageView nextButton, previousButton;
TextView playlistTitle, title;
ArrayList<MediaFiles> mAudioFilesArrayList;
int position;
MediaFilesAdapter mediaFilesAdapter;
String audioTitle;
String listTitle;
private AudioService mService;
private boolean mBound = false;
private Intent serviceIntent;
private ServiceConnection mServiceConnection = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName componentName, IBinder iBinder) {
AudioService.LocalBinder binder = (AudioService.LocalBinder) iBinder;
mService = binder.getService();
mBound = true;
Log.i(TAG + " ###", "in onServiceConnected: calling initializePlayer()");
initializePlayer();
}
@Override
public void onServiceDisconnected(ComponentName componentName) {
mBound = false;
}
};
void initializePlayer() {
if (mBound) {
Log.i(TAG + " ###", "initializePlayer() called");
player = mService.getplayerInstance();
// playerView.setVisibility(View.VISIBLE);
// player.setPlayWhenReady(true);
playerView.setPlayer(player);
// player.play();
// player.seekTo(position, C.TIME_UNSET);
playError();
}
}
@SuppressLint("UseCompatLoadingForDrawables")
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
Log.i(TAG + " ###", "onCreate() called");
super.onCreate(savedInstanceState);
setFullScreen();
setContentView(R.layout.activity_audio_player);
try {
getSupportActionBar().hide();
} catch (NullPointerException e) {
Log.e(TAG + " ###", "in VideoPlayerActivity: " + e);
}
playerView = findViewById(R.id.exoplayer_view_audio);
Log.i(TAG + " ###", "default artwork set in onCreate()");
root = findViewById(R.id.root_layout_audio);
audioBack = findViewById(R.id.audio_back_audio);
audioList = findViewById(R.id.audio_list);
nextButton = findViewById(R.id.exo_next);
previousButton = findViewById(R.id.exo_prev);
playlistTitle = findViewById(R.id.playlistTitle);
title = findViewById(R.id.audiofile_title);
menu_more = findViewById(R.id.audio_more);
SharedPreferences preferences = this.getSharedPreferences(MY_PREF, Context.MODE_PRIVATE);
listTitle = preferences.getString(FOLDER_NAME_KEY, "DEFAULT_FOLDER_NAME");
position = getIntent().getIntExtra("position", 1);
audioTitle = getIntent().getStringExtra("media_title");
Bundle b = getIntent().getBundleExtra(this.getString(R.string.bundle));
mAudioFilesArrayList = b.getParcelableArrayList(this.getString(R.string.media_array_list));
Log.i(TAG + " ###", "position=" + position);
Log.i(TAG + " ###", "is mAudioFilesArrayList null " + (mAudioFilesArrayList == null));
if (mAudioFilesArrayList == null)
return;
audioList.setOnClickListener(this);
nextButton.setOnClickListener(this);
previousButton.setOnClickListener(this);
audioBack.setOnClickListener(this);
menu_more.setOnClickListener(this);
startServiceIntent();
}
void startServiceIntent() {
Log.i(TAG + " ###", "entered startServiceIntent");
serviceIntent = new Intent(this, AudioService.class);
Log.i(TAG + " ###", "position=" + position);
serviceIntent.putExtra("position", position);
Bundle bundle = new Bundle();
Log.i(TAG + " ###", "is mediaArrayList null " + (mAudioFilesArrayList == null));
bundle.putParcelableArrayList(this.getString(R.string.media_array_list), mAudioFilesArrayList);
serviceIntent.putExtra("bundle", bundle);
Util.startForegroundService(this, serviceIntent);
playerView.setControllerHideOnTouch(false);
playerView.setUseController(true);
playerView.setControllerShowTimeoutMs(0);
playerView.showController();
playerView.setControllerAutoShow(true);
setUI();
}
private void playError() {
Log.i(TAG + " ###", "entered the playError() method()");
//Player.EventListener is deprecated
player.addListener(new Player.Listener() {
@Override
public void onTracksInfoChanged(@NonNull TracksInfo tracksInfo) {
Player.Listener.super.onTracksInfoChanged(tracksInfo);
Toast.makeText(AudioActivity.this, "TrackInfoChanged", Toast.LENGTH_SHORT).show();
}
@Override
public void onPlayerError(@NonNull PlaybackException error) {
Toast.makeText(AudioActivity.this, "Audio Playing Error", Toast.LENGTH_SHORT).show();
}
});
player.setPlayWhenReady(true);
}
private void setFullScreen() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
}
@Override
protected void onStart() {
super.onStart();
Log.i(TAG + " ###", "entered onStart()");
getApplicationContext().bindService(serviceIntent, mServiceConnection, 0);
initializePlayer();
setUI();
}
void setUI() {
Log.i(TAG + " ###", "entered setUI()");
title.setText(audioTitle);
playlistTitle.setText(listTitle);
Log.i(TAG + " ###", "text set in setUI()");
//playerView.setDefaultArtwork(AppCompatResources.getDrawable(this,R.drawable.ic_sharp_music_note_24));
//playerView.setUseArtwork(true);
}
@Override
protected void onResume() {
super.onResume();
Log.i(TAG + " ###", "onResume() called");
}
@Override
protected void onDestroy() {
super.onDestroy();
}
@Override
protected void onStop() {
Log.i(TAG + " ###", "onStop() called, unbinding service!");
getApplicationContext().unbindService(mServiceConnection);
mBound = false;
finish();
super.onStop();
}
@Override
public void onClick(View view) {
Log.i(TAG + " ###", "entered onClick() method");
switch (view.getId()) {
case R.id.exo_play:
player.play();
break;
case R.id.exo_pause:
player.pause();
break;
case R.id.exo_next:
try {
player.stop();
stopService(serviceIntent);
position++;
audioTitle = mAudioFilesArrayList.get(position).getTitle();
title.setText(audioTitle);
startServiceIntent();
initializePlayer();
player = mService.getplayerInstance();
//playerView.setPlayer(null);
playerView.setPlayer(player);
playError();
} catch (Exception e) {
Toast.makeText(this, "No next audio file", Toast.LENGTH_SHORT).show();
position--;
}
break;
case R.id.exo_prev:
Log.i(TAG + " ###", "previous button clicked!");
try {
player.stop();
Log.i(TAG+" ###","player stopped!");
stopService(serviceIntent);
position--;
audioTitle = mAudioFilesArrayList.get(position).getTitle();
title.setText(audioTitle);
startServiceIntent();
//initializePlayer();
player=mService.getplayerInstance();
//playerView.setPlayer(null);
playerView.setPlayer(player);
playError();
} catch (Exception e) {
Toast.makeText(this, "No previous Video", Toast.LENGTH_SHORT).show();
position++;
}
break;
case R.id.audio_back_audio:
if (player != null)
//player.release();
finish();
break;
case R.id.audio_list:
PlaylistDialog playlistDialog = new PlaylistDialog(mAudioFilesArrayList, mediaFilesAdapter);
playlistDialog.show(getSupportFragmentManager(), playlistDialog.getTag());
break;
case R.id.audio_more:
PopupMenu popupMenu = new PopupMenu(this, menu_more);
MenuInflater inflater = popupMenu.getMenuInflater();
inflater.inflate(R.menu.actions_video, popupMenu.getMenu());
popupMenu.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(MenuItem menuItem) {
int id = menuItem.getItemId();
switch (id) {
case R.id.share_file:
Intent shareIntent = new Intent(Intent.ACTION_SEND);
String filepath = mAudioFilesArrayList.get(position).getPath();
Uri uri = Uri.parse(filepath);
shareIntent.setType("*/*");
shareIntent.putExtra(Intent.EXTRA_STREAM, uri);
startActivity(Intent.createChooser(shareIntent, "Share File using"));
break;
}
return false;
}
});
popupMenu.show();
break;
}
}
}
AudioService.java
package com.example.videoplayer.audio;
import android.annotation.SuppressLint;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Binder;
import android.os.Build;
import android.os.Bundle;
import android.os.IBinder;
import android.util.Log;
import androidx.annotation.Nullable;
import com.example.videoplayer.MediaFiles;
import com.example.videoplayer.R;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.audio.AudioAttributes;
import com.google.android.exoplayer2.source.ConcatenatingMediaSource;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.ProgressiveMediaSource;
import com.google.android.exoplayer2.ui.PlayerNotificationManager;
import com.google.android.exoplayer2.upstream.DefaultDataSource;
import java.io.File;
import java.util.ArrayList;
public class AudioService extends Service {
private static final String TAG = AudioService.class.getSimpleName();
private final IBinder mBinder = new LocalBinder();
private SimpleExoPlayer player;
private int position;
PlaybackParameters parameters;
private ArrayList<MediaFiles> mAudioFilesArrayList;
private PlayerNotificationManager.Builder playerNotificationManagerBuilder;
private PlayerNotificationManager playerNotificationManager;
ConcatenatingMediaSource concatenatingMediaSource;
private static final int NOTIFICATION_ID = 1;
private static final String CHANNEL_ID = "channel_id";
@Override
public void onCreate() {
super.onCreate();
}
@Override
public void onDestroy() {
Log.i(TAG+" ###","onDestroy() called");
releasePlayer();
super.onDestroy();
}
private void releasePlayer() {
if (player != null) {
Log.i(TAG+" ###","Player!=null and releasePlayer called");
playerNotificationManager.setPlayer(null);
player.release();
player = null;
}
}
@Nullable
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
public SimpleExoPlayer getplayerInstance() {
Log.i(TAG+" ###","getplayerInstance() called: "+(player==null));
if (player == null) {
startPlayer();
}
return player;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
// try {
Bundle b = intent.getBundleExtra(this.getString(R.string.bundle));
String name=this.getString( R.string.media_array_list);
mAudioFilesArrayList = b.getParcelableArrayList(this.getString( R.string.media_array_list));
// }
// catch(Exception e)
// {
// Log.e(AudioService.class.getSimpleName()+" ###",e.toString());
// stopSelf();
// }
position = intent.getIntExtra("position", 1);
Log.i(TAG+"###"," in onStartCommand()");
Log.i(TAG+" ###","is mediaArrayList null"+(mAudioFilesArrayList==null));
Log.i(TAG+" ###","position="+position);
if (player == null) {
Log.i(TAG+" ###","Player is null, in onStartCommand calling startPlayer()");
Log.i(TAG+" ###","is mediaArrayList null"+(mAudioFilesArrayList==null));
Log.i(TAG+" ###","position="+position);
startPlayer();
}
return START_STICKY;
}
private void startPlayer() {
Log.i(TAG+" ###","entered startPlayer()");
Log.i(TAG+" ###","is mediaArrayList null"+(mAudioFilesArrayList==null));
Log.i(TAG+" ###","position="+position);
final Context context = this;
createNotificationChannel();
playerNotificationManagerBuilder = new PlayerNotificationManager.Builder(context, NOTIFICATION_ID, CHANNEL_ID)
.setRewindActionIconResourceId(R.drawable.ic_rewind)
.setNextActionIconResourceId(R.drawable.ic_next)
.setPreviousActionIconResourceId(R.drawable.ic_previous)
.setSmallIconResourceId(R.drawable.ic_sharp_music_note_24);
playerNotificationManagerBuilder.setPlayActionIconResourceId(R.drawable.ic_play);
playerNotificationManagerBuilder.setPauseActionIconResourceId(R.drawable.ic_pause);
playerNotificationManagerBuilder.setFastForwardActionIconResourceId(R.drawable.ic_fast_forward);
playerNotificationManagerBuilder.setMediaDescriptionAdapter(new PlayerNotificationManager.MediaDescriptionAdapter() {
@Override
public CharSequence getCurrentContentTitle(Player player) {
return mAudioFilesArrayList.get(position).getTitle();
}
@Nullable
@Override
public PendingIntent createCurrentContentIntent(Player player) {
Intent intent = new Intent(context, AudioActivity.class);
intent.putExtra("position", position);
Bundle bundle = new Bundle();
bundle.putParcelableArrayList(context.getString( R.string.media_array_list), mAudioFilesArrayList);
intent.putExtra("bundle",bundle);
return PendingIntent.getActivity(context, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT|PendingIntent.FLAG_IMMUTABLE);
}
@Nullable
@Override
public CharSequence getCurrentContentText(Player player) {
return "Summary";
}
@Nullable
@Override
public Bitmap getCurrentLargeIcon(Player player, PlayerNotificationManager.BitmapCallback callback) {
int window=player.getCurrentMediaItemIndex();
Log.i(TAG+"###","in getCurrentLargeIcon: window="+window);
return null;
// return getCurrentLargeIcon(player,callback);
}
});
playerNotificationManagerBuilder.setNotificationListener(new PlayerNotificationManager.NotificationListener() {
@Override
public void onNotificationCancelled(int notificationId, boolean dismissedByUser) {
Log.i(TAG+" ###","onNotificationCancelled called!");
stopSelf();
}
@Override
public void onNotificationPosted(int notificationId, Notification notification, boolean ongoing) {
startForeground(notificationId, notification);
Log.i(TAG+" ###","startForeground() in onNotificationPosted!");
}
});
playerNotificationManager=playerNotificationManagerBuilder.build();
Log.i(TAG+" ###","startPlayer() called");
String path = mAudioFilesArrayList.get(position).getPath();
Uri uri = Uri.parse(path);
// PlayerView.Builder builder = new ExoPlayer.Builder(this);
// builder.setSeekForwardIncrementMs(5000);
// builder.setSeekBackIncrementMs(5000);
// player = builder.build();
SimpleExoPlayer.Builder builder = new SimpleExoPlayer.Builder(this);
builder.setSeekForwardIncrementMs(10000);
builder.setSeekBackIncrementMs(10000);
player = builder.build();
DefaultDataSource.Factory dataSourceFactory = new DefaultDataSource.Factory(this);
concatenatingMediaSource = new ConcatenatingMediaSource();
for (int i = 0; i < mAudioFilesArrayList.size(); i++) {
new File(String.valueOf(mAudioFilesArrayList.get(i)));
MediaSource mediaSource = new ProgressiveMediaSource.Factory(dataSourceFactory)
.createMediaSource(MediaItem.fromUri(Uri.parse(String.valueOf(uri))));
concatenatingMediaSource.addMediaSource(mediaSource);
}
player.setMediaSource(concatenatingMediaSource);
AudioAttributes audioAttributes = new AudioAttributes.Builder()
.setUsage(C.USAGE_MEDIA)
.setContentType(C.CONTENT_TYPE_MUSIC)
.build();
player.setAudioAttributes(audioAttributes, true);
player.setPlaybackParameters(parameters);
player.prepare();
player.play();
player.seekTo(position,C.TIME_UNSET);
//player.setPlayWhenReady(true);
playerNotificationManager.setPlayer(player);
playerNotificationManager.setUseNextAction(true);
playerNotificationManager.setSmallIcon(R.drawable.ic_sharp_music_note_24);
playerNotificationManager.setColorized(true);
playerNotificationManager.setUseChronometer(true);
playerNotificationManager.setColor(R.color.teal_200);
playerNotificationManager.setUseChronometer(true);
playerNotificationManager.setUseRewindAction(true);
playerNotificationManager.setUseFastForwardAction(true);
playerNotificationManager.setUsePlayPauseActions(true);
Log.i(TAG+" ###","returning from startPlayer()");
}
private void createNotificationChannel() {
Log.i(TAG+" ###","createNotificationChannel()");
// Create the NotificationChannel, but only on API 26+ because
// the NotificationChannel class is new and not in the support library
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
CharSequence name = getString(R.string.channel_name);
String description = getString(R.string.channel_description);
int importance = NotificationManager.IMPORTANCE_DEFAULT;
NotificationChannel channel = new NotificationChannel(CHANNEL_ID, name, importance);
channel.setDescription(description);
// Register the channel with the system; you can't change the importance
// or other notification behaviors after this
NotificationManager notificationManager = getSystemService(NotificationManager.class);
notificationManager.createNotificationChannel(channel);
}
}
public class LocalBinder extends Binder {
public AudioService getService() {
return AudioService.this;
}
}
}
Please help me find and resolve the problem.
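One thing worth noting: AudioService.startPlayer() already prepares the whole list as a ConcatenatingMediaSource, so the player can move between items itself, whereas the activity's next/previous handlers stop the service and start a new one, which restarts playback from scratch. A hedged sketch of a helper for AudioActivity that leans on the prepared playlist instead (this method is not in the original code; the Player calls are ExoPlayer 2.16-era names, older releases use next()/previous()):
// Hypothetical helper, called from the R.id.exo_next / R.id.exo_prev branches of onClick().
private void skip(boolean forward) {
    if (player == null) return;
    if (forward && player.hasNextMediaItem()) {
        player.seekToNextMediaItem();             // advance within the prepared playlist
    } else if (!forward && player.hasPreviousMediaItem()) {
        player.seekToPreviousMediaItem();         // go back within the prepared playlist
    } else {
        Toast.makeText(this, forward ? "No next audio file" : "No previous audio file", Toast.LENGTH_SHORT).show();
        return;
    }
    position = player.getCurrentMediaItemIndex(); // keep the on-screen title in sync
    title.setText(mAudioFilesArrayList.get(position).getTitle());
}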
In my opinion, the use of MediaSessionCompat is essential for an audio app. Very good instructions can be found here.
I hope this helps you.
GGK
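For reference, a minimal sketch of what that suggestion can look like with the ExoPlayer media-session extension, assuming the extension-mediasession artifact is added as a dependency; this is illustrative, not the answerer's exact code. It would go in AudioService.startPlayer(), after player and playerNotificationManager are built:
// Assumed imports: android.support.v4.media.session.MediaSessionCompat,
// com.google.android.exoplayer2.ext.mediasession.MediaSessionConnector.
MediaSessionCompat mediaSession = new MediaSessionCompat(this, "AudioService");
mediaSession.setActive(true);

MediaSessionConnector mediaSessionConnector = new MediaSessionConnector(mediaSession);
mediaSessionConnector.setPlayer(player);   // routes transport controls (play/pause/next/prev) to ExoPlayer

// Tie the notification to the session so its actions and metadata stay in sync.
playerNotificationManager.setMediaSessionToken(mediaSession.getSessionToken());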
I have been through hell coding my first Android app.
I get E/Camera: Error 2 in Logcat whenever the camera is used (that error code indicates the camera is being used more than once).
I have attached the entire project in case you want to, and are able to, run it. Kindly help.
Link to Project on Google Drive
I am also attaching the code of the main files in this post if you want to view it directly.
Here is MainActivity.java
package com.example.cse535a1;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.content.FileProvider;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.MotionEvent;
import android.widget.Button;
import android.content.Context;
import android.view.View;
import android.hardware.*;
import android.widget.FrameLayout;
import android.widget.TextView;
import org.opencv.core.*;
import org.opencv.videoio.VideoCapture;
import java.io.File;
public class MainActivity extends AppCompatActivity implements SensorEventListener {
private Camera c;
private CameraView cv1;
private FrameLayout view_camera;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (!(getApplicationContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA))) {
this.finish();
System.exit(0);
}
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
VideoCapture video_capture;
Button button_symptoms = (Button)findViewById(R.id.button_symptoms);
Button button_upload_signs = (Button)findViewById(R.id.button_upload_signs);
Button button_measure_heart_rate = (Button)findViewById(R.id.button_measure_heart_rate);
Button button_measure_respiratory_rate = (Button)findViewById(R.id.button_measure_respiratory_rate);
c = getcam();
cv1 = new CameraView(getApplicationContext(), c);
view_camera = (FrameLayout)findViewById(R.id.view_camera);
view_camera.addView(cv1);
TextView finger_on_sensor = (TextView)findViewById(R.id.text_finger_on_sensor);
finger_on_sensor.setVisibility(View.INVISIBLE);
finger_on_sensor.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View arg_view, MotionEvent arg_me) {
finger_on_sensor.setVisibility(View.INVISIBLE);
File file_video = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/video_finger.mp4");
final int VIDEO_CAPTURE = 1;
Intent intent_record_video = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
intent_record_video.putExtra(MediaStore.EXTRA_DURATION_LIMIT, 45);
Uri fileUri = FileProvider.getUriForFile(MainActivity.this, "com.example.cse535a1.provider", file_video);
intent_record_video.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
c.release();
startActivityForResult(intent_record_video, VIDEO_CAPTURE);
c.stopPreview();
return true;
}
});
button_symptoms.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
Intent intent = new Intent(getApplicationContext(), Loggin_symptoms.class);
startActivity(intent);
}
});
button_upload_signs.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
}
});
button_measure_heart_rate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
finger_on_sensor.setVisibility(View.VISIBLE);
}
});
button_measure_respiratory_rate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
SensorManager manager_sensor = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
Sensor sensor_accelerometer = manager_sensor.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
manager_sensor.registerListener(MainActivity.this, sensor_accelerometer, SensorManager.SENSOR_DELAY_NORMAL);
}
});
}
@Override
public void onSensorChanged(SensorEvent arg_event) {
float x = arg_event.values[0];
float y = arg_event.values[1];
float z = arg_event.values[2];
Log.i("ACCELEROMETER", String.valueOf(x) + ' ' + String.valueOf(y) + ' ' + String.valueOf(z));
}
@Override
public void onAccuracyChanged(Sensor arg_sensor, int arg_accuracy) {
}
public Camera getcam() {
Camera c = null;
try { c = Camera.open(0); }
catch (Exception e) {
}
return c;
}
@Override
protected void onResume() {
super.onResume();
c = getcam();
cv1 = new CameraView(getApplicationContext(), c);
view_camera.addView(cv1);
}
@Override
protected void onDestroy() {
c.stopPreview();
c.release();
c = null;
super.onDestroy();
}
}
Here is CameraView.java
package com.example.cse535a1;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.content.Context;
import android.hardware.Camera;
import java.io.IOException;
public class CameraView extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder holder_surface;
private Camera camera_selected;
public CameraView(Context arg_context, Camera arg_camera) {
super(arg_context);
// Log.i("Cam", "constructor");
camera_selected = arg_camera;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
holder_surface = getHolder();
holder_surface.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
holder_surface.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder arg_holder) {
// The Surface has been created, now tell the camera where to draw the preview.
try {
camera_selected.setPreviewDisplay(arg_holder);
camera_selected.startPreview();
// Log.i("Cam", "surface creator");
} catch (IOException e) {
// Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// empty. Take care of releasing the Camera preview in your activity.
}
public void surfaceChanged(SurfaceHolder arg_holder, int arg_format, int arg_width, int arg_height) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (holder_surface.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
camera_selected.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
camera_selected.setPreviewDisplay(holder_surface);
camera_selected.startPreview();
} catch (Exception e){
// Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
}
Here are the updated files. Removing the statements that release the camera seems to eliminate the errors about using the camera after it was released, and unlocking the camera in onPause() removed the E/Camera: Error 2 message. A sketch of the conventional release/reopen pattern follows the updated files below.
MainActivity.java
package com.example.cse535a1;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.content.FileProvider;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.MotionEvent;
import android.widget.Button;
import android.content.Context;
import android.view.View;
import android.hardware.*;
import android.widget.FrameLayout;
import android.widget.TextView;
import org.opencv.core.*;
import org.opencv.videoio.VideoCapture;
import java.io.File;
public class MainActivity extends AppCompatActivity implements SensorEventListener {
private Camera c;
private CameraView cv1;
private FrameLayout view_camera;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (!(getApplicationContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA))) {
this.finish();
System.exit(0);
}
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
VideoCapture video_capture;
Button button_symptoms = (Button)findViewById(R.id.button_symptoms);
Button button_upload_signs = (Button)findViewById(R.id.button_upload_signs);
Button button_measure_heart_rate = (Button)findViewById(R.id.button_measure_heart_rate);
Button button_measure_respiratory_rate = (Button)findViewById(R.id.button_measure_respiratory_rate);
cv1 = new CameraView(getApplicationContext(), this);
view_camera = (FrameLayout)findViewById(R.id.view_camera);
view_camera.addView(cv1);
TextView finger_on_sensor = (TextView)findViewById(R.id.text_finger_on_sensor);
finger_on_sensor.setVisibility(View.INVISIBLE);
finger_on_sensor.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View arg_view, MotionEvent arg_me) {
finger_on_sensor.setVisibility(View.INVISIBLE);
File file_video = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/video_finger.mp4");
final int VIDEO_CAPTURE = 101;
Intent intent_record_video = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
intent_record_video.putExtra(MediaStore.EXTRA_DURATION_LIMIT, 45);
Uri fileUri = FileProvider.getUriForFile(MainActivity.this, "com.example.cse535a1.provider", file_video);
intent_record_video.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
startActivityForResult(intent_record_video, VIDEO_CAPTURE);
return false;
}
});
button_symptoms.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
Intent intent = new Intent(getApplicationContext(), Loggin_symptoms.class);
startActivity(intent);
}
});
button_upload_signs.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
}
});
button_measure_heart_rate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
finger_on_sensor.setVisibility(View.VISIBLE);
}
});
button_measure_respiratory_rate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
SensorManager manager_sensor = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
Sensor sensor_accelerometer = manager_sensor.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
manager_sensor.registerListener(MainActivity.this, sensor_accelerometer, SensorManager.SENSOR_DELAY_NORMAL);
}
});
}
public void setCam(Camera arg_camera) {
c = arg_camera;
}
@Override
public void onSensorChanged(SensorEvent arg_event) {
float x = arg_event.values[0];
float y = arg_event.values[1];
float z = arg_event.values[2];
Log.i("ACCELEROMETER", String.valueOf(x) + ' ' + String.valueOf(y) + ' ' + String.valueOf(z));
}
@Override
public void onAccuracyChanged(Sensor arg_sensor, int arg_accuracy) {
}
public Camera getcam() {
Camera c = null;
try { c = Camera.open(0); }
catch (Exception e) {
}
return c;
}
@Override
protected void onPause() {
super.onPause();
c.unlock();
// if (c != null) {
// c.stopPreview();
// c.release();
// c = null;
// }
}
@Override
protected void onResume() {
super.onResume();
// if (c != null) {
// c.stopPreview();
// c.release();
// c = null;
// }
// cv1 = new CameraView(getApplicationContext(), this);
// view_camera.addView(cv1);
}
@Override
protected void onDestroy() {
if (c != null) {
c.stopPreview();
c.release();
c = null;
}
super.onDestroy();
}
}
CameraView.java
package com.example.cse535a1;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.content.Context;
import android.hardware.Camera;
import java.io.IOException;
public class CameraView extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder holder_surface;
private Camera camera_selected;
MainActivity act1;
public CameraView(Context arg_context, MainActivity arg_activity) {
super(arg_context);
// camera_selected = arg_camera;
act1 = arg_activity;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
holder_surface = getHolder();
holder_surface.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
holder_surface.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder arg_holder) {
// The Surface has been created, now tell the camera where to draw the preview.
try {
// Log.i("CAMNULL", "CAM IS : " + String.valueOf(camera_selected == null));
Camera c = null;
try {
c = Camera.open(0);
} catch (Exception e) {
Log.e("CAMERA", "Camera not opened");
}
act1.setCam(c);
camera_selected = c;
camera_selected.setPreviewDisplay(arg_holder);
// camera_selected.startPreview();
// Log.i("Cam", "surface creator");
} catch (IOException e) {
// Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// empty. Take care of releasing the Camera preview in your activity.
// if (camera_selected != null) {
// camera_selected.stopPreview();
// camera_selected.release();
// camera_selected = null;
// }
}
public void surfaceChanged(SurfaceHolder arg_holder, int arg_format, int arg_width, int arg_height) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (holder_surface.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
camera_selected.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
camera_selected.setPreviewDisplay(holder_surface);
camera_selected.startPreview();
} catch (Exception e){
// Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
}
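As referenced above, here is a minimal sketch of the conventional android.hardware.Camera lifecycle, which is the opposite of the workaround of never releasing: own the camera only while the activity is in the foreground, and never touch the Camera object after release(). Field names are the ones from the updated MainActivity; these onPause()/onResume() bodies are an assumption, not the project's current code.
@Override
protected void onResume() {
    super.onResume();
    // Re-create the preview; the updated CameraView opens the camera in
    // surfaceCreated() and hands it back to the activity via setCam().
    view_camera.removeAllViews();
    cv1 = new CameraView(getApplicationContext(), this);
    view_camera.addView(cv1);
}

@Override
protected void onPause() {
    super.onPause();
    view_camera.removeAllViews();   // tears down the preview surface
    if (c != null) {
        c.stopPreview();
        c.release();                // hand the camera back to the system
        c = null;
    }
}
With this pattern the Error 2 case (which the question describes as the camera being used more than once) should be less likely, because only one open Camera instance ever exists and it is released before another component, such as the ACTION_VIDEO_CAPTURE intent, needs the hardware.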
I am trying to play an RTSP video on a custom TextureView. The video plays the first time, but when I go to a second activity where the same video is played (not re-instantiated, the same session, just a different TextureView) it plays there; coming back to the first activity, where I set the surface again in onResume, the TextureView still shows the frame where it was left the first time. Interestingly, the video is still playing, it is just not visible in the TextureView, and if I go to the second screen again it shows as before. I tried releasing the SurfaceTexture and not releasing it, and also releasing the Surface I keep a reference to in the first activity for later use; none of that seems to work. What can be the possible reason?
In onResume I check whether the app is coming back from the app drawer or from the full-screen activity. When I send the app to the background with the home button and come back from the app drawer, the video plays; only when coming back from full screen does it not work. (A sketch of re-attaching the surface follows the code below.)
StreamingActivity.java
package com.wiznsystems.android.activities;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.graphics.Point;
import android.graphics.SurfaceTexture;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.Fragment;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.TextureView;
import android.view.View;
import android.view.Display;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.ProgressBar;
import android.widget.Toast;
import com.myeglu.android.R;
import com.myeglu.zoomview.ZoomTextureView;
import com.wiznsystems.android.App;
import com.wiznsystems.android.data.objects.FFMPEG;
import com.wiznsystems.android.utils.Constants;
import com.wiznsystems.android.utils.Events;
import com.wiznsystems.android.utils.FFMPEGPlayer;
import java.io.File;
import de.greenrobot.event.EventBus;
import hugo.weaving.DebugLog;
import timber.log.Timber;
/**
* An example full-screen activity that shows and hides the system UI (i.e.
* status bar and navigation/system bar) with user interaction.
*/
@SuppressWarnings("JniMissingFunction")
public class StreamingActivity extends Fragment implements TextureView.SurfaceTextureListener{
private static boolean loadedLibraries;
private boolean comingFromAppDrawer;
private boolean isComingFromFullScreen;
boolean anotherVideo=false;
boolean shouldTextureUpdate=false;
Surface surface;
public StreamingActivity(){
}
public static ZoomTextureView surfaceView;
private ProgressBar progressBar;
public static boolean isPlaying;
private int isInitialized;
public static int isFullScreenDisplayed=0;
private String url;
public boolean isFirstTime;
private FFMPEG ffmpeg;
private FrameLayout frameLayout;
private final String TAG=StreamingActivity.class.getSimpleName();
int w=0,h=0;
private boolean isFirstTimeForFullscreen=true;
private Button fullScreenButton;
FFMPEGPlayer ffmpegPlayer;
String buttonText="";
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
EventBus.getDefault().register(this);
url=getArguments().getString("url");
Log.d("ANURAN",url);
//getActivity().getWindow().addFlags(WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED);
ffmpegPlayer=App.getFFMPEG();
}
@Nullable
@Override
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
View view=inflater.inflate(R.layout.activity_streaming,null,false);
surfaceView = (ZoomTextureView) view.findViewById(R.id.textureView);
frameLayout=(FrameLayout)view.findViewById(R.id.streaming_framelayout);
progressBar = ((ProgressBar)view.findViewById(R.id.progressBar));
fullScreenButton=(Button)view.findViewById(R.id.fullScreenButton);
fullScreenButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
isFullScreenDisplayed = 1;
isComingFromFullScreen = true;
shouldTextureUpdate=true;
if (isFirstTimeForFullscreen) {
//ffmpegPlayer.libSetSurface(null);
isFirstTimeForFullscreen = false;
}
//surfaceView.getSurfaceTexture().release();
//surface.release();
progressBar.setVisibility(View.INVISIBLE);
goFullScreen();
} catch (Exception throwable) {
throwable.printStackTrace();
}
}
});
progressBar.setVisibility(View.VISIBLE);
Log.d("ANURAN","onCreateView called");
surfaceView.setSurfaceTextureListener(this);
return view;
}
@Override
public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
//Log.d(TAG,url);
}
@DebugLog
private void postInit() {
Events.PlayButtonBGChanger playButtonBGChanger=new Events.PlayButtonBGChanger();
if (isInitialized==0) {
playButtonBGChanger.setShouldChange(true);
initPlay();
progressBar.setVisibility(View.GONE);
} else if(isInitialized==-999){
playButtonBGChanger.setShouldChange(false);
progressBar.setVisibility(View.INVISIBLE);
Snackbar.make(frameLayout,"Please make sure you have stopped other playing videos before playing this one.",Snackbar.LENGTH_LONG).show();
}
else {
playButtonBGChanger.setShouldChange(false);
Snackbar.make(frameLayout,"Something went wrong while live streaming.Please try again later.",Snackbar.LENGTH_LONG).show();
}
EventBus.getDefault().post(playButtonBGChanger);
}
private void initPlay() {
try {
int[] res = ffmpegPlayer.libGetVideoRes();
Log.d("ANURAN", "res width " + res[0] + ": height " + res[1]);
if (res[0] <= 0) {
res[0] = 480;
}
if (res[1] <= 0) {
res[1] = 320;
}
int[] screenRes = getScreenRes();
int width, height;
float widthScaledRatio = screenRes[0] * 1.0f / res[0];
float heightScaledRatio = screenRes[1] * 1.0f / res[1];
if (widthScaledRatio > heightScaledRatio) {
//use heightScaledRatio
width = (int) (res[0] * heightScaledRatio);
height = screenRes[1];
} else {
//use widthScaledRatio
width = screenRes[0];
height = (int) (res[1] * widthScaledRatio);
}
Log.d(TAG, "width " + width + ",height:" + height);
w=width;
h=height;
updateSurfaceView(width, height);
try{
ffmpegPlayer.libSetup(width,height);
if(this.surface == null){
Log.d("ANURAN","surface is null");
}
if(anotherVideo)
ffmpegPlayer.libSetSurface(null);
ffmpegPlayer.libSetSurface(surface);
Log.d("ANURAN","libsetsurface set initPlay()");
}catch (Exception throwable){
Toast.makeText(getActivity(),"Something went wrong while live streaming.Try again",Toast.LENGTH_SHORT).show();
}
playMedia();
}catch (Exception throwable){
throwable.printStackTrace();
}
}
public FFMPEGPlayer getFFMPEGPlayer(){
return this.ffmpegPlayer;
}
private void playMedia() {
if(progressBar.getVisibility()==View.VISIBLE){
progressBar.setVisibility(View.GONE);
}
try{
ffmpegPlayer.libPlay();
}catch (Exception throwable){
Toast.makeText(getActivity(),"Something went wrong while live streaming.Try again",Toast.LENGTH_SHORT).show();
}
isPlaying = true;
CamerasActivity.isPlaying=true;
}
@DebugLog
private void updateSurfaceView(int pWidth, int pHeight) {
//update surfaceview dimension, this will cause the native window to change
Log.d("ANURAN UPDATE SURFACE", "width " + pWidth + ",height:" + pHeight);
FrameLayout.LayoutParams params = (FrameLayout.LayoutParams) surfaceView.getLayoutParams();
params.width = pWidth;
params.height = pHeight;
surfaceView.setLayoutParams(params);
surfaceView.requestLayout();
}
@DebugLog
@SuppressLint("NewApi")
private int[] getScreenRes() {
int[] res = new int[2];
Display display = getActivity().getWindowManager().getDefaultDisplay();
Point size = new Point();
display.getSize(size);
res[0] = size.x;
res[1] = size.y;
return res;
}
public void stopPlaying() {
isPlaying = false;
try{
ffmpegPlayer.libStop();
// ffmpegPlayer.libSetSurface(null);
// surfaceView.getSurfaceTexture().release();
// surfaceView.getSurfaceTexture().detachFromGLContext();
}catch (Exception throwable){
}
}
@Override
public void onStop() {
// Toast.makeText(getActivity(),"onStop called",Toast.LENGTH_SHORT).show();
// stopPlaying();
comingFromAppDrawer=true;
// if(surfaceView.getSurfaceTexture() !=null){
// surfaceView.getSurfaceTexture().release();
//
// }
// if(surface !=null){
// surface.release();
// surface=null;
// }
// if(isFullScreenDisplayed==0){
// stopPlaying();
// }
Log.d("ANURAN onStop",surface.isValid()+"");
super.onStop();
}
@Override
public void onPause() {
// Toast.makeText(getActivity(),"onStop called",Toast.LENGTH_SHORT).show();
// stopPlaying();
isComingFromFullScreen=true;
super.onPause();
}
@Override
public void onResume() {
super.onResume();
if(isComingFromFullScreen){
progressBar.setVisibility(View.INVISIBLE);
if(surface !=null){
ffmpegPlayer.libSetSurface(null);
Log.d("ANURAN onResume","value of surface "+surface.isValid());
ffmpegPlayer.libSetSurface(surface);
}
}
else if(comingFromAppDrawer){
//stopPlaying();
progressBar.setVisibility(View.INVISIBLE);
if(surface !=null){
ffmpegPlayer.libSetSurface(null);
//ffmpegPlayer.libSetup(w,h);
ffmpegPlayer.libSetSurface(surface);
}
}
}
@Override
public void onDestroy() {
super.onDestroy();
stopPlaying();
}
@Override
public void onStart() {
super.onStart();
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
if(this.surface !=null) {
this.surface.release();
this.surface=null;
}
this.surface=new Surface(surface);
Log.d("ANURAN","surfacetexture available streaming activity");
new PlayVideo().execute();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
Log.d("ANURAN","surfacetexturesize changed streaming activity");
try{
w=width;
h=height;
this.surface.release();
this.surface=null;
this.surface=new Surface(surface);
if(isComingFromFullScreen){
//surfaceView.getHolder().getSurface().release();
updateSurfaceView(width, height);
ffmpegPlayer.libSetup(width,height);
ffmpegPlayer.libSetSurface(this.surface);
}
}catch (Exception throwable){
Toast.makeText(getActivity(),"Something went wrong while live streaming.Try again",Toast.LENGTH_SHORT).show();
}
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
Log.d("ANURAN","surfacetexture destroyed streaming activity");
// surfaceView.getSurfaceTexture().release();
// if(this.surface !=null){
// this.surface.release();
// this.surface=null;
// }
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
Log.d("ANURAN","surfacetexture updated streaming activity");
if(this.surface !=null){
this.surface.release();
this.surface=null;
this.surface=new Surface(surface);
}else{
this.surface=new Surface(surface);
}
}
public class PlayVideo extends AsyncTask<Void,Void,Void>{
@Override
protected Void doInBackground(Void... voids) {
try{
isInitialized=ffmpegPlayer.libInit(url);
}catch (Exception e){
e.printStackTrace();
Snackbar.make(frameLayout,"Exception Occured",Snackbar.LENGTH_SHORT).show();
}
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
isFirstTime=false;
postInit();
this.cancel(true);
}
}
public void onEvent(FFMPEG ffmpeg){
url = ffmpeg.getUrl();
progressBar.setVisibility(View.VISIBLE);
//stopPlaying();
anotherVideo=true;
new PlayVideo().execute();
}
public void onEvent(Events.UpdateUrl newurl){
url=newurl.getUrl();
}
public void onEvent(Events.StopPlayback event){
stopPlaying();
}
public void onEvent(Events.NotifyPlayer notifyPlayer){
Snackbar.make(frameLayout,"Please stop any running video before playing another one",Toast.LENGTH_SHORT).show();
}
private void goFullScreen(){
Intent intent=new Intent(getContext(),FullScreenActivity.class);
Bundle bundle=new Bundle();
bundle.putString("url",url);
intent.putExtras(bundle);
getActivity().startActivity(intent);
}
}
FullScreenActivity.java
package com.wiznsystems.android.activities;
import android.annotation.SuppressLint;
import android.graphics.Matrix;
import android.graphics.Point;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.Toast;
import com.myeglu.android.R;
import com.myeglu.zoomview.AngleView;
import com.myeglu.zoomview.MatrixChangeListener;
import com.myeglu.zoomview.ZoomTextureView;
import com.wiznsystems.android.App;
import com.wiznsystems.android.data.objects.FFMPEG;
import com.wiznsystems.android.utils.FFMPEGPlayer;
import uk.copywitchshame.senab.photoview.gestures.PhotoViewAttacher;
import hugo.weaving.DebugLog;
/**
* Created by anuran on 9/3/18.
*/
@SuppressWarnings("JniMissingFunction")
public class FullScreenActivity extends AppCompatActivity implements TextureView.SurfaceTextureListener, PhotoViewAttacher.OnMatrixChangedListener {
private ZoomTextureView surfaceView;
public static AngleView angleView;
private ProgressBar progressBar;
private PhotoViewAttacher photoViewAttacher;
public static boolean isPlaying;
private boolean isInitialized;
private String url;
private FrameLayout frameLayout;
Surface surface;
private final String TAG=StreamingActivity.class.getSimpleName();
public int backCounter=0;
FFMPEGPlayer ffmpegPlayer;
boolean fromADorSL=false;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().requestFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.activity_fullscreen);
surfaceView = (ZoomTextureView) findViewById(R.id.textureView);
angleView=(AngleView)findViewById(R.id.render_angle_view);
surfaceView.setSurfaceTextureListener(this);
frameLayout=(FrameLayout)findViewById(R.id.streaming_framelayout);
progressBar = ((ProgressBar)findViewById(R.id.progressBar));
progressBar.setVisibility(View.VISIBLE);
url=getIntent().getExtras().getString("url");
//new PlayVideo().execute();
ffmpegPlayer= App.getFFMPEG();
Log.d("ANURAN","fullscreen onCreate");
}
// @DebugLog
// private void postInit() {
// if (isInitialized) {
// initPlay();
// progressBar.setVisibility(View.GONE);
// } else {
// finish();
// }
// }
// private void initPlay() {
//
// try{
// int[] res = FFMPEGPlayer.libGetVideoRes();
// Log.d("ANURAN", "res width " + res[0] + ": height " + res[1]);
// if (res[0] <= 0) {
// res[0] = 480;
// }
// if (res[1] <= 0) {
// res[1] = 320;
// }
// int[] screenRes = getScreenRes();
// int width, height;
// float widthScaledRatio = screenRes[0] * 1.0f / res[0];
// float heightScaledRatio = screenRes[1] * 1.0f / res[1];
// if (widthScaledRatio > heightScaledRatio) {
// //use heightScaledRatio
// width = (int) (res[0] * heightScaledRatio);
// height = screenRes[1];
// } else {
// //use widthScaledRatio
// width = screenRes[0];
// height = (int) (res[1] * widthScaledRatio);
// }
// Log.d(TAG, "width " + width + ",height:" + height);
// updateSurfaceView(width, height);
// FFMPEGPlayer.libSetup(width, height);
// playMedia();
//
// photoViewAttacher = new PhotoViewAttacher(surfaceView, width, height);
// photoViewAttacher.setScaleType(ImageView.ScaleType.CENTER_CROP);
// photoViewAttacher.setOnMatrixChangeListener(this);
// photoViewAttacher.update();
// }catch (Exception e){
//
// }
// }
// private void playMedia() {
//
// try{
// if(progressBar.getVisibility()==View.VISIBLE){
// progressBar.setVisibility(View.GONE);
// }
// FFMPEGPlayer.libPlay();
// isPlaying = true;
// CamerasActivity.isPlaying=true;
// }catch (Exception e){
//
// }
// }
// @DebugLog
// private void updateSurfaceView(int pWidth, int pHeight) {
// //update surfaceview dimension, this will cause the native window to change
// Log.d("ANURAN UPDATE SURFACE", "width " + pWidth + ",height:" + pHeight);
// FrameLayout.LayoutParams params = (FrameLayout.LayoutParams) surfaceView.getLayoutParams();
// params.width = pWidth;
// params.height = pHeight;
// surfaceView.setLayoutParams(params);
// }
@Override
public void onBackPressed() {
if(backCounter==1){
super.onBackPressed();
}else{
//surface.release();
StreamingActivity.isFullScreenDisplayed=0;
surfaceView.getSurfaceTexture().release();
//ffmpegPlayer.libSetSurface(null);
if(this.surface !=null){
this.surface.release();
this.surface=null;
}
backCounter++;
Toast.makeText(FullScreenActivity.this,"Press back again to quit full screen",Toast.LENGTH_SHORT).show();
}
}
@DebugLog
@SuppressLint("NewApi")
private int[] getScreenRes() {
int[] res = new int[2];
Display display = getWindowManager().getDefaultDisplay();
Point size = new Point();
display.getSize(size);
res[0] = size.x;
res[1] = size.y;
return res;
}
private void stopPlaying() {
isPlaying = false;
try{
ffmpegPlayer.libStop();
}catch (Exception e){
}
}
@Override
public void onStop() {
super.onStop();
fromADorSL=true;
}
@Override
public void onResume() {
super.onResume();
if(fromADorSL){
ffmpegPlayer.libSetSurface(null);
ffmpegPlayer.libSetSurface(this.surface);
}
}
@Override
public void onStart() {
super.onStart();
}
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
//Toast.makeText(FullScreenActivity.this,"SurfaceTexture available",Toast.LENGTH_SHORT).show();
//updateSurfaceView(width, height);
try{
this.surface=new Surface(surface);
ffmpegPlayer.libSetup(width, height);
ffmpegPlayer.libSetSurface(this.surface);
photoViewAttacher = new PhotoViewAttacher(surfaceView, width, height);
photoViewAttacher.setScaleType(ImageView.ScaleType.CENTER_CROP);
photoViewAttacher.setOnMatrixChangeListener(this);
photoViewAttacher.update();
progressBar.setVisibility(View.INVISIBLE);
angleView.setVisibility(View.VISIBLE);
}catch (Exception e){
}
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
try{
Toast.makeText(FullScreenActivity.this,"SurfaceTexture changed",Toast.LENGTH_SHORT).show();
if (photoViewAttacher != null ) {
photoViewAttacher.update ();
}
if(this.surface !=null){
this.surface.release();
}
this.surface=null;
this.surface=new Surface(surface);
ffmpegPlayer.libSetup(width,height);
ffmpegPlayer.libSetSurface(this.surface);
}catch (Exception e){
}
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
try{
// this.surface.release();
// this.surface=null;
surfaceView.getSurfaceTexture().release();
if(this.surface !=null){
this.surface.release();
this.surface=null;
}
//ffmpegPlayer.libSetSurface(null);
}catch (Exception e){
}
return true;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
if(this.surface !=null){
this.surface.release();
this.surface=null;
this.surface=new Surface(surface);
}
}
#Override
public void onMatrixChanged(Matrix matrix, RectF rectF) {
float maxMovement = (rectF.width() - surfaceView.getWidth());
float middle = surfaceView.getWidth() * 0.5f + surfaceView.getLeft();
float currentMiddle = rectF.width() * 0.5f + rectF.left;
float angle=(-(int) ((currentMiddle - middle) * 100 / maxMovement));
Log.d("ANURAN",angle+"");
angleView.setCurrentProgress((int)angle);
}
// public class PlayVideo extends AsyncTask<Void,Void,Void> {
//
// #Override
// protected Void doInBackground(Void... voids) {
// try{
// isInitialized=(FFMPEGPlayer.libInit(url)==0);
// }catch (Exception e){
// e.printStackTrace();
// }
// return null;
// }
//
// #Override
// protected void onPostExecute(Void aVoid) {
// super.onPostExecute(aVoid);
// postInit();
// this.cancel(true);
// }
// }
}
I am working on an Android app and am trying to figure out how to display a popup confirmation window with Confirm and Cancel buttons when a button is pressed.
Here is the creation of the alert.
final AlertDialog.Builder alertBuilder = new AlertDialog.Builder(activity);
alertBuilder.setTitle("Your Title");
alertBuilder.setMessage("Your Messages");
alertBuilder.setPositiveButton("Confirm", new OnClickListener() {
public void onClick(DialogInterface dialog, int whichButton) {
// Do something with value!
}
});
alertBuilder.setNegativeButton("Cancel", new OnClickListener() {
public void onClick(DialogInterface dialog, int whichButton) {
// Canceled.
}
});
Here is the call to show.
Sprite p2 = new Sprite(goldMult, 25, 450, WIDTH, HEIGHT,
resourceManager.spriteRegion, vbom) {
/**
* #see org.andengine.entity.shape.Shape#onAreaTouched(org.andengine.input.touch.TouchEvent, float, float)
*/
#Override
public boolean onAreaTouched(final TouchEvent sceneTouchEvent, final float touchAreaLocalX,
final float touchAreaLocalY) {
AlertDialog alert = alertBuilder.create();
alert.show();
return true;
}
};
I am getting this exception:
java.lang.RuntimeException: Can't create handler inside thread that has not called Looper.prepare()
I saw a post with a similar issue here
However, I am confused about how to implement this with my onAreaTouched event.
Thanks for any help in advance.
AndEngine runs its callbacks on the OpenGL/update thread, so you might want to create a Handler on the UI thread and post to it so the dialog is built and shown there.
public Handler handler;
then
handler = new Handler() {
#Override
public void handleMessage(Message msg) {
switch (msg.what) {
case 0:
AlertDialog alert = alertBuilder.create();
alert.show();
break;
}
}};
then to use it
handler.sendMessage(Message.obtain(handler, 0));
something along those lines
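For example, here is a minimal sketch of how the handler could be triggered from the sprite's touch callback; this assumes handler was created on the UI thread in onCreate() and alertBuilder is already configured as shown above:
Sprite p2 = new Sprite(goldMult, 25, 450, WIDTH, HEIGHT,
        resourceManager.spriteRegion, vbom) {
    public boolean onAreaTouched(final TouchEvent sceneTouchEvent,
            final float touchAreaLocalX, final float touchAreaLocalY) {
        // Do not build or show the dialog here; this callback runs on the
        // update/GL thread. Just post a message to the handler that lives
        // on the UI thread.
        handler.sendMessage(Message.obtain(handler, 0));
        return true;
    }
};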
EDIT: here is a class that uses a Handler this way to work around the Looper.prepare() problem on the OpenGL thread:
import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.MalformedURLException;
import java.nio.IntBuffer;
import java.util.List;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import com.facebook.android.AsyncFacebookRunner;
import com.facebook.android.AsyncFacebookRunner.RequestListener;
import com.facebook.android.DialogError;
import com.facebook.android.Facebook;
import com.facebook.android.FacebookError;
import com.facebook.android.Facebook.DialogListener;
import pic.puzzle.framework.Audio;
import pic.puzzle.framework.FileIO;
import pic.puzzle.framework.Game;
import pic.puzzle.framework.Graphics;
import pic.puzzle.framework.Input;
import pic.puzzle.framework.Screen;
import pic.puzzle.picturepuzzle.GameOverScreen;
import pic.puzzle.picturepuzzle.GameOverScreenCustom;
import pic.puzzle.picturepuzzle.PicturePuzzleScreen;
import pic.puzzle.picturepuzzle.PuzzleScreen;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.graphics.Bitmap;
import android.net.Uri;
import android.opengl.GLSurfaceView;
import android.opengl.GLSurfaceView.Renderer;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Toast;
public abstract class GLGame extends Activity implements Game, Renderer {
enum GLGameState {
Initialized,
Running,
Paused,
Finished,
Idle
}
GLSurfaceView glView;
GLGraphics glGraphics;
Audio audio;
Input input;
FileIO fileIO;
Screen screen;
GLGameState state = GLGameState.Initialized;
Object stateChanged = new Object();
long startTime = System.nanoTime();
WakeLock wakeLock;
public static Bitmap lastscreenshot;
public static boolean screenshot = false,finish = false;
public static int width, height;
public static Handler handler;
public static boolean share = false;
String APP_ID = ("567944629883125");
public Facebook fb;
public byte[] data;
public static int custom = 0;
public Uri uri;
String here;
#SuppressLint("HandlerLeak")
#SuppressWarnings("deprecation")
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
glView = new GLSurfaceView(this);
glView.setEGLConfigChooser(8 , 8, 8, 8, 16, 0);
glView.setRenderer(this);
setContentView(glView);
fb = new Facebook(APP_ID);
glGraphics = new GLGraphics(glView);
fileIO = new AndroidFileIO(getAssets());
audio = new AndroidAudio(this);
input = new AndroidInput(this, glView, 1, 1);
PowerManager powerManager = (PowerManager) getSystemService(Context.POWER_SERVICE);
wakeLock = powerManager.newWakeLock(PowerManager.FULL_WAKE_LOCK, "GLGame");
handler = new Handler() {
#Override
public void handleMessage(Message msg) {
switch (msg.what) {
case 0:
if(custom == 0)
uri = GameOverScreen.pngUri;
else if(custom == 1)
uri = GameOverScreenCustom.pngUri;
if(custom == 0){
here = "Moves: " + GameOverScreen.moves + " " + "Time: " + GameOverScreen.time;}
else if(custom == 1){
here = "Moves: " + GameOverScreenCustom.moves + " " + "Time: " + GameOverScreenCustom.time;}
Intent shareIntent = new Intent(android.content.Intent.ACTION_SEND);
shareIntent.setType("text/plain");
shareIntent.putExtra(android.content.Intent.EXTRA_TEXT,
here);
shareIntent.setType("image/png");
shareIntent.putExtra(android.content.Intent.EXTRA_STREAM,
uri); //Share the image on Facebook
PackageManager pm = getApplicationContext().getPackageManager();
List<ResolveInfo> activityList = pm.queryIntentActivities(
shareIntent, 0);
for (final ResolveInfo app : activityList) {
if ((app.activityInfo.name).contains("facebook")) {
final ActivityInfo activity = app.activityInfo;
final ComponentName name = new ComponentName(
activity.applicationInfo.packageName,
activity.name);
shareIntent.addCategory(Intent.CATEGORY_LAUNCHER);
shareIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK
| Intent.FLAG_ACTIVITY_RESET_TASK_IF_NEEDED);
shareIntent.setComponent(name);
startActivity(shareIntent);
break;
}
}
}
}
};
}
public void onResume() {
super.onResume();
glView.onResume();
wakeLock.acquire();
}
#Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
glGraphics.setGL(gl);
synchronized(stateChanged) {
if(state == GLGameState.Initialized)
screen = getStartScreen();
state = GLGameState.Running;
screen.resume();
startTime = System.nanoTime();
}
}
#Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLGame.width = width;
GLGame.height = height;
}
#Override
public void onDrawFrame(GL10 gl) {
GLGameState state = null;
if(finish){
finish = false;
finish();
}
if(share) {
share = false;
handler.sendMessage(Message.obtain(handler, 0));
}
synchronized(stateChanged) {
state = this.state;
}
if(state == GLGameState.Running) {
float deltaTime = (System.nanoTime()-startTime) / 1000000000.0f;
startTime = System.nanoTime();
screen.update(deltaTime);
screen.present(deltaTime);
if(screenshot){
lastscreenshot = SavePixels(0,0,width,height,gl);
lastscreenshot = Bitmap.createScaledBitmap(lastscreenshot, 320, 480, true);
screenshot = false;
}
}
if(state == GLGameState.Paused) {
screen.pause();
synchronized(stateChanged) {
this.state = GLGameState.Idle;
stateChanged.notifyAll();
}
}
if(state == GLGameState.Finished) {
screen.pause();
screen.dispose();
synchronized(stateChanged) {
this.state = GLGameState.Idle;
stateChanged.notifyAll();
}
}
}
#Override
public void onPause() {
synchronized(stateChanged) {
if(isFinishing())
state = GLGameState.Finished;
else
state = GLGameState.Paused;
while(true) {
try {
stateChanged.wait();
break;
} catch(InterruptedException e) {
}
}
}
wakeLock.release();
glView.onPause();
super.onPause();
}
#Override
public void onDestroy(){
if(lastscreenshot != null)
lastscreenshot.recycle();
if(PicturePuzzleScreen.pic != null)
PicturePuzzleScreen.pic.recycle();
if(GameOverScreen.finalbitmap != null)
GameOverScreen.finalbitmap.recycle();
if(GameOverScreenCustom.finalbitmap != null)
GameOverScreenCustom.finalbitmap.recycle();
System.gc();
super.onDestroy();
}
public static Bitmap SavePixels(int x, int y, int w, int h, GL10 gl)
{
int b[]=new int[w*(y+h)];
int bt[]=new int[w*h];
IntBuffer ib=IntBuffer.wrap(b);
ib.position(0);
gl.glReadPixels(x, 0, w, y+h, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, ib);
for(int i=0, k=0; i<h; i++, k++)
{
// OpenGL's pixel layout differs from Android's Bitmap: rows come back
// bottom-up and the red/blue channels are swapped, so the loop corrects both.
for(int j=0; j<w; j++)
{
int pix=b[i*w+j];
int pb=(pix>>16)&0xff;
int pr=(pix<<16)&0x00ff0000;
int pix1=(pix&0xff00ff00) | pr | pb;
bt[(h-k-1)*w+j]=pix1;
}
}
Bitmap sb=Bitmap.createBitmap(bt, w, h, Bitmap.Config.RGB_565);
return sb;
}
public GLGraphics getGLGraphics() {
return glGraphics;
}
#Override
public Input getInput() {
return input;
}
#Override
public FileIO getFileIO() {
return fileIO;
}
#Override
public Graphics getGraphics() {
throw new IllegalStateException("We are using OpenGL!");
}
#Override
public Audio getAudio() {
return audio;
}
#Override
public void setScreen(Screen screen) {
if (screen == null)
throw new IllegalArgumentException("Screen must not be null");
this.screen.pause();
this.screen.dispose();
screen.resume();
screen.update(0);
this.screen = screen;
}
#Override
public Screen getCurrentScreen() {
return screen;
}
#SuppressWarnings("deprecation")
#Override
protected void onActivityResult(int requestCode,int resultCode,Intent data)
{
super.onActivityResult(requestCode, resultCode, data);
fb.authorizeCallback(requestCode, resultCode, data);
}
}
I figured out the answer by wrapping the alert in runOnUiThread, so it is built and shown on the UI thread:
activity.runOnUiThread(new Runnable() {
#Override
public void run() {
AlertDialog.Builder alert = new AlertDialog.Builder(activity);
alert.setTitle("");
alert.setMessage("");
alert.setPositiveButton("Buy", new OnClickListener() {
#Override
public void onClick(DialogInterface arg0, int arg1) {
}
});
alert.setNegativeButton("Cancel", new OnClickListener() {
#Override
public void onClick(DialogInterface arg0, int arg1) {
}
});
alert.show();
}
});
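If the dialog has to be launched from inside the AndEngine touch callback, the same wrapping can be done there directly. A minimal sketch, assuming activity is a reference to the hosting Activity and alertBuilder is the builder created earlier:
public boolean onAreaTouched(final TouchEvent sceneTouchEvent,
        final float touchAreaLocalX, final float touchAreaLocalY) {
    // The callback runs on the update/GL thread; runOnUiThread queues the
    // Runnable on the main thread, which already has a prepared Looper,
    // so the dialog can be created and shown there safely.
    activity.runOnUiThread(new Runnable() {
        public void run() {
            alertBuilder.create().show();
        }
    });
    return true;
}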
My code behaves differently when the application runs on Android 2.2 versus Android 3.0. It is built against Android 2.2 and produces no build errors in Eclipse, but on Android 2.2 the OnCompletionListener does not seem to be called as expected. Am I doing something wrong, or is there a difference between Android 2.2 and 3.0 regarding OnCompletionListener?
My sample code is from http://developer.android.com/resources/samples/ApiDemos/src/com/example/android/apis/media/MediaPlayerDemo_Video.html
import android.app.Activity;
import android.content.Context;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnBufferingUpdateListener;
import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.media.MediaPlayer.OnVideoSizeChangedListener;
import android.os.Bundle;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;
import android.view.WindowManager;
public class MediaPlayerActivity extends Activity implements OnBufferingUpdateListener, OnCompletionListener, OnPreparedListener, OnVideoSizeChangedListener, SurfaceHolder.Callback {
private final String TAG = "MediaPlayer";
private SurfaceView m_preview;
private SurfaceHolder m_holder;
private Bundle m_extras;
private String m_path;
private MediaPlayer m_mediaPlayer;
private int m_videoWidht;
private int m_videoHeight;
private boolean m_isVideoReadyToPlay;
private boolean m_isVideoSizeKnown;
private static final int LOCAL_VIDEO = 1;
private static final int STREAM_VIDEO = 2;
private static final String MEDIA = "media";
private static final String FILE = "/mnt/sdcard/file1.avi";
private WakeLock m_keepscreen;
/** Called when the activity is first created. */
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.i(TAG, "MediaPlayer::onCreate");
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.videoplayer);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
m_keepscreen = pm.newWakeLock( PowerManager.FULL_WAKE_LOCK | PowerManager.ON_AFTER_RELEASE | PowerManager.ACQUIRE_CAUSES_WAKEUP,"MediaPlayerActivity");
m_preview = (SurfaceView) findViewById(R.id.surface);
m_holder = m_preview.getHolder();
m_holder.addCallback(this);
m_holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
m_extras = getIntent().getExtras();
m_keepscreen.acquire();
}
private void playVideo(Integer media){
doCleanUp();
try{
switch (media){
case LOCAL_VIDEO :
m_path = FILE;
Log.i(TAG, "File: "+FILE);
if ("".equals(m_path)) {
//
}
break;
case STREAM_VIDEO:
Log.i(TAG, "PlayVideo, cant stream video yet");
break;
}
m_mediaPlayer = new MediaPlayer();
m_mediaPlayer.setDataSource(m_path);
m_mediaPlayer.setDisplay(m_holder);
m_mediaPlayer.prepare();
m_mediaPlayer.setOnBufferingUpdateListener(this);
m_mediaPlayer.setOnPreparedListener(this);
m_mediaPlayer.setOnCompletionListener(this);
/*
m_mediaPlayer.setOnCompletionListener(new OnCompletionListener(){
public void onCompletion(MediaPlayer mp) {
Log.i(TAG, "MediaPlayer::onCompletion");
startVideoPlayback();
}
});
*/
m_mediaPlayer.setOnVideoSizeChangedListener(this);
m_mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
}catch (Exception e){
Log.i(TAG, "Error: "+e.getMessage());
}
}
private void doCleanUp() {
m_videoWidht = 0;
m_videoHeight = 0;
m_isVideoReadyToPlay = false;
m_isVideoSizeKnown = false;
}
#Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.i(TAG, "MediaPlayer::surfaceChanged");
}
#Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "MediaPlayer::surfaceCreated");
playVideo(1);
}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "MediaPlayer::surfaceDestroyed");
}
#Override
public void onVideoSizeChanged(MediaPlayer mp, int width, int height) {
Log.i(TAG, "MediaPlayer::videoSizeChanged");
if( width == 0 || height == 0 ){
Log.i(TAG, "invalid video with("+width+")or height ("+height+")");
}
m_isVideoSizeKnown = true;
m_videoWidht = width;
m_videoHeight = height;
if(m_isVideoReadyToPlay && m_isVideoSizeKnown){
startVideoPlayback();
}
}
private void startVideoPlayback() {
Log.i(TAG, "StartVideoPlayback");
m_holder.setFixedSize(m_videoWidht, m_videoHeight);
m_mediaPlayer.start();
}
#Override
public void onPrepared(MediaPlayer mp) {
Log.i(TAG, "MediaPlayer::onPrepared");
m_isVideoReadyToPlay = true;
if(m_isVideoReadyToPlay && m_isVideoSizeKnown){
startVideoPlayback();
}
}
#Override
public void onCompletion(MediaPlayer mp) {
//android 2.2 never get here
Log.i(TAG, "MediaPlayer::onCompletion");
startVideoPlayback();
}
#Override
public void onBufferingUpdate(MediaPlayer mp, int percent) {
Log.i(TAG, "MediaPlayer::onBufferingUpdate: "+percent+" %");
}
#Override
protected void onPause() {
super.onPause();
releaseMediaPlayer();
doCleanUp();
}
#Override
protected void onDestroy() {
super.onDestroy();
releaseMediaPlayer();
doCleanUp();
}
private void releaseMediaPlayer() {
if (m_mediaPlayer != null) {
m_mediaPlayer.release();
m_mediaPlayer = null;
m_keepscreen.release();
}
}
}
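As a first diagnostic step it may help to register an OnErrorListener (and optionally an OnInfoListener) alongside the other listeners in playVideo(), so the 2.2 device logs whether the framework reports an error or info event instead of reaching completion. A minimal sketch only, not a confirmed fix:
m_mediaPlayer.setOnErrorListener(new MediaPlayer.OnErrorListener() {
    public boolean onError(MediaPlayer mp, int what, int extra) {
        // Log whatever the framework reports; returning false keeps the
        // default handling, which still invokes onCompletion afterwards.
        Log.i(TAG, "MediaPlayer::onError what=" + what + " extra=" + extra);
        return false;
    }
});
m_mediaPlayer.setOnInfoListener(new MediaPlayer.OnInfoListener() {
    public boolean onInfo(MediaPlayer mp, int what, int extra) {
        Log.i(TAG, "MediaPlayer::onInfo what=" + what + " extra=" + extra);
        return false;
    }
});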