How would I stop the music playing, i.e. call onPause, if there is an incoming or outgoing call?
So once a call comes in, or the user makes a call, it should stop the music by calling onPause.
package com.beanie.samples.streaming;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import com.beanie.samples.streaming.R;
import com.beanie.samples.streaming.MyService;
import android.app.Activity;
import android.app.IntentService;
import android.app.Service;
import android.content.Intent;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnBufferingUpdateListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.telephony.TelephonyManager;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ProgressBar;
import android.widget.Toast;
public class HomeActivity extends Activity implements OnClickListener {
private static final String TAG = "MyServices";
private final static String RADIO_STATION_URL = "http://195.154.237.162:8936/";
private static final String START_STICKY = null;
Button buttonPlay, buttonStopPlay;
/** Called when the activity is first created.
* Keep this here or the application will stop working */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
initializeUIElements();
initializeMediaPlayer();
}
private ProgressBar playSeekBar;
private MediaPlayer player;
private InputStream recordingStream;
private RecorderThread recorderThread;
private boolean isRecording = false;
private void initializeUIElements() {
playSeekBar = (ProgressBar) findViewById(R.id.progressBar1);
playSeekBar.setMax(100);
playSeekBar.setVisibility(View.INVISIBLE);
buttonPlay = (Button) findViewById(R.id.buttonPlay);
buttonPlay.setOnClickListener(this);
buttonStopPlay = (Button) findViewById(R.id.buttonStopPlay);
buttonStopPlay.setEnabled(false);
buttonStopPlay.setOnClickListener(this);
}
public void getTelephonyOverview(final TelephonyManager telMgr)
{
int callState = telMgr.getCallState();
String callStateString = "NA";
switch (callState) {
case TelephonyManager.CALL_STATE_IDLE:
Log.i("Call", "idle");
break;
case TelephonyManager.CALL_STATE_OFFHOOK:
Log.i("Call","started");
break;
case TelephonyManager.CALL_STATE_RINGING:
Log.i("Call","ringing");
break;
}
}
public void startPlaying() {
buttonStopPlay.setEnabled(true);
buttonPlay.setEnabled(false);
playSeekBar.setVisibility(View.VISIBLE);
player.prepareAsync();
player.setOnPreparedListener(new OnPreparedListener() {
public void onPrepared(MediaPlayer mp) {
player.start();
}
});
}
private void onBufferingUpdate(MediaPlayer mp, int percent) {
playSeekBar.setSecondaryProgress(percent);
Toast.makeText(this, "Buffering " + percent, Toast.LENGTH_SHORT).show();
Log.i("Buffering", "" + percent);
}
public void onClick(View v) {
if (v == buttonPlay) {
onBufferingUpdate(player, 0);
Log.d(TAG, "onClick: starting srvice");
startPlaying();
player.setLooping(false); // Set looping
}
else if (v == buttonStopPlay) {
Log.d(TAG, "onClick: stopping srvice");
stopPlaying();
}
}
private void stopPlaying() {
if (player.isPlaying()) {
player.stop();
player.release();
initializeMediaPlayer();
}
buttonPlay.setEnabled(true);
buttonStopPlay.setEnabled(false);
playSeekBar.setVisibility(View.INVISIBLE);
stopRecording();
}
private void initializeMediaPlayer() {
player = new MediaPlayer();
try {
player.setDataSource(RADIO_STATION_URL);
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void startRecording() {
BufferedOutputStream writer = null;
try {
URL url = new URL(RADIO_STATION_URL);
URLConnection connection = url.openConnection();
final String FOLDER_PATH = Environment.getExternalStorageDirectory().getAbsolutePath()
+ File.separator + "Songs";
File folder = new File(FOLDER_PATH);
if (!folder.exists()) {
folder.mkdir();
}
writer = new BufferedOutputStream(new FileOutputStream(new File(FOLDER_PATH
+ File.separator + "sample.mp3")));
recordingStream = connection.getInputStream();
final int BUFFER_SIZE = 100;
byte[] buffer = new byte[BUFFER_SIZE];
while (recordingStream.read(buffer, 0, BUFFER_SIZE) != -1 && isRecording) {
writer.write(buffer, 0, BUFFER_SIZE);
writer.flush();
}
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
recordingStream.close();
writer.flush();
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void stopRecording() {
try {
isRecording = false;
if (recordingStream != null) {
recordingStream.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
private class RecorderThread extends Thread {
@Override
public void run() {
isRecording = true;
startRecording();
}
};
}
How would I stop the music playing, i.e. call onPause?
onPause() and onStop() are lifecycle methods that will be called automatically; you do not call them manually. You should override them and add your code that would stop the music.
You don't need to do anything in your lifecycle methods to stop the music. Android provides you with a mechanism to handle these cases.
The mechanism is called Audio Focus. Apps request audio focus when they want to play audio (or video!). When another app needs audio, it requests the audio focus, and the subscribed parties receive a callback saying: hey, you've lost the audio focus. If the focus was only lost temporarily (like when a notification arrives and plays a short beep), the callback says: hey, you've lost the audio focus, but duck your audio; I'll give it back in a moment.
The idea in that case is that you lower (duck) your volume to 0.1 (for example) to let the other sound (the beep) be louder.
After the beep from the notification is done, the callback says: hey, here's the audio focus you had earlier.
At this point, if you're still playing, you restore the volume to what it was before.
That's the concept behind audio focus. It's well explained in the Android docs (and the Android samples); although Google's sample code is rather messy and inconsistent, with a lot of unnecessary abstraction and some strange code practices, it does work out of the box.
The same goes for a phone call: you subscribe to the call state and tell your service to pause/resume the sound when there's a call in progress. Don't do it when your app goes to the background (onPause()), because that's unreliable and doesn't necessarily mean your app went to the background at all.
Here are the official Android docs for Audio Focus.
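For what it's worth, here is a minimal sketch of what requesting audio focus and reacting to focus changes could look like with the MediaPlayer from the question. It assumes the player field and startPlaying() from the code above; the exact duck/pause behaviour is up to you.
// Sketch only, inside HomeActivity. Needs these imports:
import android.content.Context;
import android.media.AudioManager;

private AudioManager audioManager;

private final AudioManager.OnAudioFocusChangeListener focusListener =
        new AudioManager.OnAudioFocusChangeListener() {
    @Override
    public void onAudioFocusChange(int focusChange) {
        switch (focusChange) {
            case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
                player.setVolume(0.1f, 0.1f);           // duck while the short sound plays
                break;
            case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
                if (player.isPlaying()) player.pause(); // e.g. an incoming call is ringing
                break;
            case AudioManager.AUDIOFOCUS_LOSS:
                player.stop();                          // another app took the focus for good
                break;
            case AudioManager.AUDIOFOCUS_GAIN:
                player.setVolume(1.0f, 1.0f);           // focus came back: restore volume and resume
                player.start();
                break;
        }
    }
};

private boolean requestFocusAndPlay() {
    audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
    int result = audioManager.requestAudioFocus(focusListener,
            AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN);
    if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
        startPlaying(); // only start when focus was actually granted
        return true;
    }
    return false;
}
This uses the older requestAudioFocus(listener, streamType, durationHint) overload to match the age of the rest of the code in this thread; on API 26+ you would build an AudioFocusRequest instead.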
Been through hell coding my first Android app.
E/Camera: Error 2 in Logcat whenever camera is used (the error code is for multiple camera uses)
I have attached the entire project in case you want to, and are able to, run it. Kindly help.
Link to Project on Google Drive
I am also attaching the code to the main files in this post if you want to view it directly.
Here is MainActivity.java
package com.example.cse535a1;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.content.FileProvider;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.MotionEvent;
import android.widget.Button;
import android.content.Context;
import android.view.View;
import android.hardware.*;
import android.widget.FrameLayout;
import android.widget.TextView;
import org.opencv.core.*;
import org.opencv.videoio.VideoCapture;
import java.io.File;
public class MainActivity extends AppCompatActivity implements SensorEventListener {
private Camera c;
private CameraView cv1;
private FrameLayout view_camera;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (!(getApplicationContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA))) {
this.finish();
System.exit(0);
}
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
VideoCapture video_capture;
Button button_symptoms = (Button)findViewById(R.id.button_symptoms);
Button button_upload_signs = (Button)findViewById(R.id.button_upload_signs);
Button button_measure_heart_rate = (Button)findViewById(R.id.button_measure_heart_rate);
Button button_measure_respiratory_rate = (Button)findViewById(R.id.button_measure_respiratory_rate);
c = getcam();
cv1 = new CameraView(getApplicationContext(), c);
view_camera = (FrameLayout)findViewById(R.id.view_camera);
view_camera.addView(cv1);
TextView finger_on_sensor = (TextView)findViewById(R.id.text_finger_on_sensor);
finger_on_sensor.setVisibility(View.INVISIBLE);
finger_on_sensor.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View arg_view, MotionEvent arg_me) {
finger_on_sensor.setVisibility(View.INVISIBLE);
File file_video = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/video_finger.mp4");
final int VIDEO_CAPTURE = 1;
Intent intent_record_video = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
intent_record_video.putExtra(MediaStore.EXTRA_DURATION_LIMIT, 45);
Uri fileUri = FileProvider.getUriForFile(MainActivity.this, "com.example.cse535a1.provider", file_video);
intent_record_video.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
c.release();
startActivityForResult(intent_record_video, VIDEO_CAPTURE);
c.stopPreview();
return true;
}
});
button_symptoms.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
Intent intent = new Intent(getApplicationContext(), Loggin_symptoms.class);
startActivity(intent);
}
});
button_upload_signs.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
}
});
button_measure_heart_rate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
finger_on_sensor.setVisibility(View.VISIBLE);
}
});
button_measure_respiratory_rate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
SensorManager manager_sensor = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
Sensor sensor_accelerometer = manager_sensor.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
manager_sensor.registerListener(MainActivity.this, sensor_accelerometer, SensorManager.SENSOR_DELAY_NORMAL);
}
});
}
@Override
public void onSensorChanged(SensorEvent arg_event) {
float x = arg_event.values[0];
float y = arg_event.values[1];
float z = arg_event.values[2];
Log.i("ACCELEROMETER", String.valueOf(x) + ' ' + String.valueOf(y) + ' ' + String.valueOf(z));
}
@Override
public void onAccuracyChanged(Sensor arg_sensor, int arg_accuracy) {
}
public Camera getcam() {
Camera c = null;
try { c = Camera.open(0); }
catch (Exception e) {
}
return c;
}
@Override
protected void onResume() {
super.onResume();
c = getcam();
cv1 = new CameraView(getApplicationContext(), c);
view_camera.addView(cv1);
}
@Override
protected void onDestroy() {
c.stopPreview();
c.release();
c = null;
super.onDestroy();
}
}
Here is CameraView.java
package com.example.cse535a1;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.content.Context;
import android.hardware.Camera;
import java.io.IOException;
public class CameraView extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder holder_surface;
private Camera camera_selected;
public CameraView(Context arg_context, Camera arg_camera) {
super(arg_context);
// Log.i("Cam", "constructor");
camera_selected = arg_camera;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
holder_surface = getHolder();
holder_surface.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
holder_surface.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder arg_holder) {
// The Surface has been created, now tell the camera where to draw the preview.
try {
camera_selected.setPreviewDisplay(arg_holder);
camera_selected.startPreview();
// Log.i("Cam", "surface creator");
} catch (IOException e) {
// Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// empty. Take care of releasing the Camera preview in your activity.
}
public void surfaceChanged(SurfaceHolder arg_holder, int arg_format, int arg_width, int arg_height) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (holder_surface.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
camera_selected.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
camera_selected.setPreviewDisplay(holder_surface);
camera_selected.startPreview();
} catch (Exception e){
// Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
}
Here are the updated files. Removing the statements that release the camera seems to remove the errors about using the camera after it was released. Unlocking the camera in onPause() removed the E/Camera: Error 2.
MainActivity.java
package com.example.cse535a1;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.content.FileProvider;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.MotionEvent;
import android.widget.Button;
import android.content.Context;
import android.view.View;
import android.hardware.*;
import android.widget.FrameLayout;
import android.widget.TextView;
import org.opencv.core.*;
import org.opencv.videoio.VideoCapture;
import java.io.File;
public class MainActivity extends AppCompatActivity implements SensorEventListener {
private Camera c;
private CameraView cv1;
private FrameLayout view_camera;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (!(getApplicationContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA))) {
this.finish();
System.exit(0);
}
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
VideoCapture video_capture;
Button button_symptoms = (Button)findViewById(R.id.button_symptoms);
Button button_upload_signs = (Button)findViewById(R.id.button_upload_signs);
Button button_measure_heart_rate = (Button)findViewById(R.id.button_measure_heart_rate);
Button button_measure_respiratory_rate = (Button)findViewById(R.id.button_measure_respiratory_rate);
cv1 = new CameraView(getApplicationContext(), this);
view_camera = (FrameLayout)findViewById(R.id.view_camera);
view_camera.addView(cv1);
TextView finger_on_sensor = (TextView)findViewById(R.id.text_finger_on_sensor);
finger_on_sensor.setVisibility(View.INVISIBLE);
finger_on_sensor.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View arg_view, MotionEvent arg_me) {
finger_on_sensor.setVisibility(View.INVISIBLE);
File file_video = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/video_finger.mp4");
final int VIDEO_CAPTURE = 101;
Intent intent_record_video = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
intent_record_video.putExtra(MediaStore.EXTRA_DURATION_LIMIT, 45);
Uri fileUri = FileProvider.getUriForFile(MainActivity.this, "com.example.cse535a1.provider", file_video);
intent_record_video.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
startActivityForResult(intent_record_video, VIDEO_CAPTURE);
return false;
}
});
button_symptoms.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
Intent intent = new Intent(getApplicationContext(), Loggin_symptoms.class);
startActivity(intent);
}
});
button_upload_signs.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
}
});
button_measure_heart_rate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
finger_on_sensor.setVisibility(View.VISIBLE);
}
});
button_measure_respiratory_rate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg_view) {
SensorManager manager_sensor = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
Sensor sensor_accelerometer = manager_sensor.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
manager_sensor.registerListener(MainActivity.this, sensor_accelerometer, SensorManager.SENSOR_DELAY_NORMAL);
}
});
}
public void setCam(Camera arg_camera) {
c = arg_camera;
}
@Override
public void onSensorChanged(SensorEvent arg_event) {
float x = arg_event.values[0];
float y = arg_event.values[1];
float z = arg_event.values[2];
Log.i("ACCELEROMETER", String.valueOf(x) + ' ' + String.valueOf(y) + ' ' + String.valueOf(z));
}
@Override
public void onAccuracyChanged(Sensor arg_sensor, int arg_accuracy) {
}
public Camera getcam() {
Camera c = null;
try { c = Camera.open(0); }
catch (Exception e) {
}
return c;
}
@Override
protected void onPause() {
super.onPause();
c.unlock();
// if (c != null) {
// c.stopPreview();
// c.release();
// c = null;
// }
}
@Override
protected void onResume() {
super.onResume();
// if (c != null) {
// c.stopPreview();
// c.release();
// c = null;
// }
// cv1 = new CameraView(getApplicationContext(), this);
// view_camera.addView(cv1);
}
@Override
protected void onDestroy() {
if (c != null) {
c.stopPreview();
c.release();
c = null;
}
super.onDestroy();
}
}
CameraView.java
package com.example.cse535a1;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.content.Context;
import android.hardware.Camera;
import java.io.IOException;
public class CameraView extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder holder_surface;
private Camera camera_selected;
MainActivity act1;
public CameraView(Context arg_context, MainActivity arg_activity) {
super(arg_context);
// camera_selected = arg_camera;
act1 = arg_activity;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
holder_surface = getHolder();
holder_surface.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
holder_surface.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder arg_holder) {
// The Surface has been created, now tell the camera where to draw the preview.
try {
// Log.i("CAMNULL", "CAM IS : " + String.valueOf(camera_selected == null));
Camera c = null;
try {
c = Camera.open(0);
} catch (Exception e) {
Log.e("CAMERA", "Camera not opened");
}
act1.setCam(c);
camera_selected = c;
camera_selected.setPreviewDisplay(arg_holder);
// camera_selected.startPreview();
// Log.i("Cam", "surface creator");
} catch (IOException e) {
// Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// empty. Take care of releasing the Camera preview in your activity.
// if (camera_selected != null) {
// camera_selected.stopPreview();
// camera_selected.release();
// camera_selected = null;
// }
}
public void surfaceChanged(SurfaceHolder arg_holder, int arg_format, int arg_width, int arg_height) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (holder_surface.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
camera_selected.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
camera_selected.setPreviewDisplay(holder_surface);
camera_selected.startPreview();
} catch (Exception e){
// Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
}
I'm working on a school project and have hit a roadblock, and I wanted to see if anyone can help point me in the right direction. We are trying to capture packets from an Android phone without rooting the phone. The only option I have found is to use Android's VpnService as a middle man to capture the packets.
The idea I had was to create a button that starts capturing the packets on a click event.
package cybutech.com.datacapture;
import android.app.ActionBar;
import android.content.Intent;
import android.graphics.Color;
import android.net.VpnService;
import android.preference.PreferenceFragment;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
public class MainActivity extends AppCompatActivity {
private int capture = 1;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Toolbar myToolbar = (Toolbar) findViewById(R.id.my_toolbar);
setSupportActionBar(myToolbar);
final Button captureData = (Button)findViewById(R.id.capture);
captureData.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
if (capture == 1) {
captureData.setBackgroundColor(Color.RED);
captureData.setText("Stop Capture!");
Intent intent = VpnService.prepare(getApplicationContext());
if (intent != null) {
startActivityForResult(intent, 0);
} else {
onActivityResult(0, RESULT_OK, null);
}
capture *= -1;
} else {
captureData.setBackgroundColor(Color.GREEN);
captureData.setText("Start!");
capture *= -1;
}
}
});
}
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode == RESULT_OK) {
Intent intent = new Intent(this, MyVpnService.class);
startService(intent);
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.action, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.action_settings:
startActivity(new Intent(this, Settings.class));
break;
default:
break;
}
return true;
}
}
package cybutech.com.datacapture;
import android.content.Intent;
import android.net.VpnService;
import android.os.ParcelFileDescriptor;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStreamReader;
import java.net.InetSocketAddress;
import java.nio.channels.DatagramChannel;
public class MyVpnService extends VpnService {
private Thread mThread;
private ParcelFileDescriptor mInterface;
//a. Configure a builder for the interface.
Builder builder = new Builder();
// Services interface
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
// Start a new session by creating a new thread.
mThread = new Thread(new Runnable() {
@Override
public void run() {
try {
//a. Configure the TUN and get the interface.
mInterface = builder.setSession("MyVPNService")
.addAddress("192.168.0.1", 24)
.addDnsServer("8.8.8.8")
.addRoute("0.0.0.0", 0).establish();
//b. Packets to be sent are queued in this input stream.
FileInputStream in = new FileInputStream(
mInterface.getFileDescriptor());
//b. Packets received need to be written to this output stream.
FileOutputStream out = new FileOutputStream(
mInterface.getFileDescriptor());
//c. The UDP channel can be used to pass/get ip package to/from server
DatagramChannel tunnel = DatagramChannel.open();
// Connect to the server, localhost is used for demonstration only.
tunnel.connect(new InetSocketAddress("127.0.0.1", 8087));
//d. Protect this socket, so package send by it will not be feedback to the vpn service.
protect(tunnel.socket());
//e. Use a loop to pass packets.
while (true) {
BufferedReader reader = new BufferedReader(new InputStreamReader(in));
while(true) {
String line = reader.readLine();
if (line == null) {
System.out.print("NULLLLLLL");
break;
} else {
System.out.println("line is " + line);
}
// I am guessing that here after reading the packets, I need to forward them to the actual server.
Thread.sleep(100);
}
}
} catch (Exception e) {
// Catch any exception
e.printStackTrace();
} finally {
try {
if (mInterface != null) {
mInterface.close();
mInterface = null;
}
} catch (Exception e) {
}
}
}
}, "MyVpnRunnable");
//start the service
mThread.start();
return START_STICKY;
}
@Override
public void onDestroy() {
// TODO Auto-generated method stub
if (mThread != null) {
mThread.interrupt();
}
super.onDestroy();
}
}
I have been trying to get my app to stop playing music when the phone is ringing, but it isn't working. I've tried everything, but it seems impossible. Here's the code.
package com.beanie.samples.streaming;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import com.beanie.samples.streaming.R;
import com.beanie.samples.streaming.MyService;
import android.app.Activity;
import android.app.IntentService;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnBufferingUpdateListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.telephony.PhoneStateListener;
import android.telephony.TelephonyManager;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ProgressBar;
import android.widget.Toast;
public class HomeActivity extends Activity implements OnClickListener {
private static final String TAG = "MyServices";
private final static String RADIO_STATION_URL = "http://195.154.237.162:8936/";
private static final String START_STICKY = null;
Button buttonPlay, buttonStopPlay;
/** Called when the activity is first created.
* Keep this here or the application will stop working */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
initializeUIElements();
initializeMediaPlayer();
}
private ProgressBar playSeekBar;
private MediaPlayer player;
private InputStream recordingStream;
private RecorderThread recorderThread;
private boolean isRecording = false;
private void initializeUIElements() {
playSeekBar = (ProgressBar) findViewById(R.id.progressBar1);
playSeekBar.setMax(100);
playSeekBar.setVisibility(View.INVISIBLE);
buttonPlay = (Button) findViewById(R.id.buttonPlay);
buttonPlay.setOnClickListener(this);
buttonStopPlay = (Button) findViewById(R.id.buttonStopPlay);
buttonStopPlay.setEnabled(false);
buttonStopPlay.setOnClickListener(this);
}
public void startPlaying() {
buttonStopPlay.setEnabled(true);
buttonPlay.setEnabled(false);
playSeekBar.setVisibility(View.VISIBLE);
player.prepareAsync();
player.setOnPreparedListener(new OnPreparedListener() {
public void onPrepared(MediaPlayer mp) {
player.start();
}
});
}
private void onBufferingUpdate(MediaPlayer mp, int percent) {
playSeekBar.setSecondaryProgress(percent);
Toast.makeText(this, "Buffering " + percent, Toast.LENGTH_SHORT).show();
Log.i("Buffering", "" + percent);
}
public class GetCallerInfoActivity extends Activity {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
TelephonyManager telephonyManager = (TelephonyManager)getSystemService(Context.TELEPHONY_SERVICE);
// register PhoneStateListener
PhoneStateListener callStateListener = new PhoneStateListener() {
public void onCallStateChanged(int state, String incomingNumber) {
// If phone ringing
if (state==TelephonyManager.CALL_STATE_RINGING) {
stopPlaying();
}
// If incoming call received
if (state==TelephonyManager.CALL_STATE_OFFHOOK) {
stopPlaying();
}
if (state==TelephonyManager.CALL_STATE_IDLE) {
Toast.makeText(getApplicationContext(),"phone is neither ringing nor in a call", Toast.LENGTH_LONG).show();
}
}
};
telephonyManager.listen(callStateListener,PhoneStateListener.LISTEN_CALL_STATE);
}
}
public void onClick(View v) {
if (v == buttonPlay) {
startPlaying();
player.setLooping(false); // Set looping
} else if (v == buttonStopPlay) {
Log.d(TAG, "onClick: stopping srvice");
stopPlaying();
}
}
private void stopPlaying() {
if (player.isPlaying()) {
player.stop();
player.release();
initializeMediaPlayer();
}
buttonPlay.setEnabled(true);
buttonStopPlay.setEnabled(false);
playSeekBar.setVisibility(View.INVISIBLE);
stopRecording();
}
private void initializeMediaPlayer() {
player = new MediaPlayer();
try {
player.setDataSource(RADIO_STATION_URL);
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void startRecording() {
BufferedOutputStream writer = null;
try {
URL url = new URL(RADIO_STATION_URL);
URLConnection connection = url.openConnection();
final String FOLDER_PATH = Environment.getExternalStorageDirectory().getAbsolutePath()
+ File.separator + "Songs";
File folder = new File(FOLDER_PATH);
if (!folder.exists()) {
folder.mkdir();
}
writer = new BufferedOutputStream(new FileOutputStream(new File(FOLDER_PATH
+ File.separator + "sample.mp3")));
recordingStream = connection.getInputStream();
final int BUFFER_SIZE = 100;
byte[] buffer = new byte[BUFFER_SIZE];
while (recordingStream.read(buffer, 0, BUFFER_SIZE) != -1 && isRecording) {
writer.write(buffer, 0, BUFFER_SIZE);
writer.flush();
}
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
recordingStream.close();
writer.flush();
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void stopRecording() {
try {
isRecording = false;
if (recordingStream != null) {
recordingStream.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
private class RecorderThread extends Thread {
@Override
public void run() {
isRecording = true;
startRecording();
}
};
}
Could someone please help and implement this? I would appreciate it, and it would help a lot. Also, I have already tried to work it out myself.
You need to use the TelephonyManager
mTelephonyMgr = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);
mTelephonyMgr.listen(mPhoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
The listener object can be created like this
private PhoneStateListener mPhoneStateListener = new PhoneStateListener() {
@Override
public void onCallStateChanged(int state, String incomingNumber) {
// Test for incoming call, dialing call, active or on hold
if (state==TelephonyManager.CALL_STATE_RINGING || state==TelephonyManager.CALL_STATE_OFFHOOK)
{
stop(); // Put here the code to stop your music
}
super.onCallStateChanged(state, incomingNumber);
}
};
When stopping or closing your app, remember to call this:
mTelephonyMgr.listen(mPhoneStateListener, PhoneStateListener.LISTEN_NONE);
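To tie that into the activity from the question, the registration could look roughly like this (mTelephonyMgr is an assumed field name; depending on the Android version you may also need the READ_PHONE_STATE permission in the manifest):
private TelephonyManager mTelephonyMgr;

@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.main);
    initializeUIElements();
    initializeMediaPlayer();
    // Start listening for call-state changes as soon as the activity exists.
    mTelephonyMgr = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);
    mTelephonyMgr.listen(mPhoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
}

@Override
protected void onDestroy() {
    // Stop listening so the listener does not keep a reference to the activity.
    mTelephonyMgr.listen(mPhoneStateListener, PhoneStateListener.LISTEN_NONE);
    super.onDestroy();
}
Inside the listener, the stop() placeholder can simply be replaced by the existing stopPlaying() method.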
I have tried to get this working so that it plays in the background. Why won't it work? It looks fine to me.
I am new to Java/Android development, so I hope someone can fix the issue and explain what the problem was so I can learn something.
Yes, I got it to stream so far, but once you exit the app the music stops playing.
For being new I think I'm doing great; it must be because I have previous experience with PHP etc.
Thanks ;)
package com.beanie.samples.streaming;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import com.beanie.samples.streaming.R;
import com.beanie.samples.streaming.MyService;
import android.app.Activity;
import android.app.IntentService;
import android.app.Service;
import android.content.Intent;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnBufferingUpdateListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ProgressBar;
import android.widget.Toast;
public class HomeActivity extends Activity implements OnClickListener {
private static final String TAG = "MyServices";
private final static String RADIO_STATION_URL = "http://195.154.237.162:8936/";
private static final String START_STICKY = null;
Button buttonPlay, buttonStopPlay;
/** Called when the activity is first created.
* Keep this here or the application will stop working */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
initializeUIElements();
initializeMediaPlayer();
}
private ProgressBar playSeekBar;
private MediaPlayer player;
private InputStream recordingStream;
private RecorderThread recorderThread;
private boolean isRecording = false;
private void initializeUIElements() {
playSeekBar = (ProgressBar) findViewById(R.id.progressBar1);
playSeekBar.setMax(100);
playSeekBar.setVisibility(View.INVISIBLE);
buttonPlay = (Button) findViewById(R.id.buttonPlay);
buttonPlay.setOnClickListener(this);
buttonStopPlay = (Button) findViewById(R.id.buttonStopPlay);
buttonStopPlay.setEnabled(false);
buttonStopPlay.setOnClickListener(this);
}
public void startPlaying() {
buttonStopPlay.setEnabled(true);
buttonPlay.setEnabled(false);
playSeekBar.setVisibility(View.VISIBLE);
player.prepareAsync();
player.setOnPreparedListener(new OnPreparedListener() {
public void onPrepared(MediaPlayer mp) {
player.start();
}
});
}
public void onClick(View v) {
if (v == buttonPlay) {
Toast.makeText(this, "My Service Started", Toast.LENGTH_LONG).show();
Log.d(TAG, "onClick: starting srvice");
startService(new Intent(this, MyService.class));
startPlaying();
player.setLooping(false); // Set looping
} else if (v == buttonStopPlay) {
Log.d(TAG, "onClick: stopping srvice");
stopService(new Intent(this, MyService.class));
stopPlaying();
}
}
private void stopPlaying() {
if (player.isPlaying()) {
player.stop();
player.release();
initializeMediaPlayer();
}
buttonPlay.setEnabled(true);
buttonStopPlay.setEnabled(false);
playSeekBar.setVisibility(View.INVISIBLE);
stopRecording();
}
private void initializeMediaPlayer() {
player = new MediaPlayer();
try {
player.setDataSource(RADIO_STATION_URL);
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
player.setOnBufferingUpdateListener(new OnBufferingUpdateListener() {
public void onBufferingUpdate(MediaPlayer mp, int percent) {
playSeekBar.setSecondaryProgress(percent);
Log.i("Buffering", "" + percent);
}
});
}
@Override
protected void onPause() {
super.onPause();
if (player.isPlaying()) {
player.stop();
}
}
private void startRecording() {
BufferedOutputStream writer = null;
try {
URL url = new URL(RADIO_STATION_URL);
URLConnection connection = url.openConnection();
final String FOLDER_PATH = Environment.getExternalStorageDirectory().getAbsolutePath()
+ File.separator + "Songs";
File folder = new File(FOLDER_PATH);
if (!folder.exists()) {
folder.mkdir();
}
writer = new BufferedOutputStream(new FileOutputStream(new File(FOLDER_PATH
+ File.separator + "sample.mp3")));
recordingStream = connection.getInputStream();
final int BUFFER_SIZE = 100;
byte[] buffer = new byte[BUFFER_SIZE];
while (recordingStream.read(buffer, 0, BUFFER_SIZE) != -1 && isRecording) {
writer.write(buffer, 0, BUFFER_SIZE);
writer.flush();
}
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
recordingStream.close();
writer.flush();
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void stopRecording() {
try {
isRecording = false;
if (recordingStream != null) {
recordingStream.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
private class RecorderThread extends Thread {
@Override
public void run() {
isRecording = true;
startRecording();
}
};
}
@Override
protected void onPause() {
super.onPause();
if (player.isPlaying()) {
player.stop();
}
}
Well, it is stopping because you're asking it to stop.
When the activity goes to the background, it pauses.
You should run the stream from a service (which stays in the background without issues) and use a bound service to communicate between the activity and the service: http://developer.android.com/guide/components/bound-services.html
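A very stripped-down sketch of what that service could look like is below. The class name StreamingService and this exact structure are only illustrative; the real bound-service wiring is described in the linked docs.
import java.io.IOException;
import android.app.Service;
import android.content.Intent;
import android.media.MediaPlayer;
import android.os.IBinder;

public class StreamingService extends Service {

    // Same stream URL as in the question.
    private static final String STREAM_URL = "http://195.154.237.162:8936/";
    private MediaPlayer player;

    @Override
    public void onCreate() {
        super.onCreate();
        player = new MediaPlayer();
        try {
            player.setDataSource(STREAM_URL);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        // Prepare asynchronously and start playback once the stream is ready.
        player.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
            public void onPrepared(MediaPlayer mp) {
                mp.start();
            }
        });
        player.prepareAsync();
        return START_STICKY; // ask the system to restart the service if it gets killed
    }

    @Override
    public void onDestroy() {
        if (player != null) {
            player.release();
            player = null;
        }
        super.onDestroy();
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null; // return a Binder here if the activity needs to control playback
    }
}
The activity would then start it with startService(new Intent(this, StreamingService.class)), the way the existing onClick already does for MyService, and stop it with stopService(...) instead of stopping the player directly. The service also has to be declared in the manifest.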
I wrote code to capture an image automatically in the background.
For this, I made a thread and take the picture in it.
My code looks like this:
package com.camsharp;
import java.io.File;
import java.io.FileOutputStream;
import java.util.Timer;
import java.util.TimerTask;
import android.app.Activity;
import android.content.Context;
import android.hardware.Camera;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.widget.FrameLayout;
import android.widget.Toast;
public class MainActivity extends Activity {
private int cameraId = 0;
private Camera mCamera;
private CameraPreview mPreview;
String fileName = "tempImage.jpeg";
File file;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.d("EXEC ", "EXECUTED ");
setContentView(R.layout.activity_main);
// Create an instance of Camera
mCamera = getCameraInstance(cameraId);
if (mCamera == null) {
Toast.makeText(
getApplicationContext(),
"The camera service is currently unavailable, please try again!",
Toast.LENGTH_LONG).show();
finish();
} else {
// Create our Preview view and set it as the content of our
// activity.
mPreview = new CameraPreview(this, mCamera);
FrameLayout frameLayout = (FrameLayout) findViewById(R.id.camera_preview);
frameLayout.addView(mPreview);
}
// start thread for these
MyTimerTask myTask = new MyTimerTask();
Timer myTimer = new Timer();
// public void schedule (TimerTask task, long delay, long period)
// Schedule a task for repeated fixed-delay execution after a specific
// delay.
//
// Parameters
// task the task to schedule.
// delay amount of time in milliseconds before first execution.
// period amount of time in milliseconds between subsequent executions.
myTimer.schedule(myTask, 3000, 1500);
}
class MyTimerTask extends TimerTask {
public void run() {
try {
Log.e("SUCCESS ", "IT IS OKAY ");
//mCamera.takePicture(null, null, null, mPictureCallback);
mCamera.takePicture(null, null, mPictureCallback);
file = new File(getFilesDir(), fileName);
System.out.println(file);
} catch (Exception e) {
Log.e("Error ", "EXCEPTION ");
e.printStackTrace();
}
Log.e("LOG ", "timer testing");
}
}
Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
public void onPictureTaken(byte[] imageData, Camera c) {
Log.e("Callback TAG", "Here in jpeg Callback");
if (imageData != null) {
FileOutputStream outputStream;
try {
outputStream = openFileOutput(fileName,
Context.MODE_PRIVATE);
outputStream.write(imageData);
outputStream.close();
// Intent intent = new Intent(SnapScreen.this,
// PreviewScreen.class);
// if (fromMessageReview == true) {
// intent.putExtra("fromMessageReview", "true");
// }
// startActivity(intent);
// overridePendingTransition(R.anim.slide_in,
// R.anim.slide_out);
finish();
} catch (Exception e) {
e.printStackTrace();
}
}
}
};
@Override
protected void onDestroy() {
super.onDestroy();
releaseCamera();
}
/** A safe way to get an instance of the Camera object. */
public static Camera getCameraInstance(int cameraId) {
Camera c = null;
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
c = Camera.open(cameraId);
} else {
c = Camera.open();
}
} catch (Exception e) {
c = null;
}
return c; // returns null if camera is unavailable
}
private void releaseCamera() {
if (mCamera != null) {
mCamera.release(); // release the camera for other applications
mCamera = null;
}
}
}
Here, when the thread runs, it throws a java.lang.NullPointerException.
I tried
mCamera.startPreview();
before taking a new image, but it was no use.
Can anyone identify where I am making a mistake, and why?